From 67d6e1f75786fe15e9fb08e83304f6a1f8634622 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Tue, 30 May 2023 14:22:45 -0600 Subject: [PATCH 001/109] build(cosmos): make Node.js addon tolerate relocation --- golang/cosmos/Makefile | 1 - golang/cosmos/binding.gyp.in | 33 +++++++++++++++++++++++++++++---- 2 files changed, 29 insertions(+), 5 deletions(-) diff --git a/golang/cosmos/Makefile b/golang/cosmos/Makefile index 2e4250d5859..c0db4e96ad5 100644 --- a/golang/cosmos/Makefile +++ b/golang/cosmos/Makefile @@ -76,7 +76,6 @@ compile-helper: go-mod-cache compile-libdaemon: go-mod-cache go build -v $(MOD_READONLY) $(BUILD_FLAGS) -buildmode=c-shared -o build/libagcosmosdaemon.so ./cmd/libdaemon/main.go - test "`uname -s 2>/dev/null`" != Darwin || install_name_tool -id $$PWD/build/libagcosmosdaemon.so build/libagcosmosdaemon.so go-mod-cache: ../../go.sum @echo "--> Download go modules to local cache" diff --git a/golang/cosmos/binding.gyp.in b/golang/cosmos/binding.gyp.in index 2c08dd5b535..84644a1ca0f 100644 --- a/golang/cosmos/binding.gyp.in +++ b/golang/cosmos/binding.gyp.in @@ -2,6 +2,9 @@ "targets": [ { "target_name": "agcosmosdaemon", + 'variables': { + "target_lib": "lib<(_target_name).so", + }, "cflags!": [ "-fno-exceptions" ], "cflags_cc!": [ "-fno-exceptions" ], "xcode_settings": { @@ -18,17 +21,39 @@ " Date: Mon, 5 Jun 2023 09:56:25 -0600 Subject: [PATCH 002/109] chore(golang): move all Go files to `cosmos/golang` --- bin/agd | 2 +- golang/cosmos/Makefile | 4 ++-- go.mod => golang/cosmos/go.mod | 6 +++--- go.sum => golang/cosmos/go.sum | 0 packages/deployment/Dockerfile.sdk | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) rename go.mod => golang/cosmos/go.mod (97%) rename go.sum => golang/cosmos/go.sum (100%) diff --git a/bin/agd b/bin/agd index 69812305f85..7ffc9198170 100755 --- a/bin/agd +++ b/bin/agd @@ -148,7 +148,7 @@ fi print=() fi print+=( -print ) - src=$(find go.* "$GOLANG_DIR" \( -name '*.go' -o -name 'go.*' \) "${print[@]}" | head -1 || 
true) + src=$(find "$GOLANG_DIR" \( -name '*.go' -o -name 'go.*' \) "${print[@]}" | head -1 || true) test -z "$src" || { echo "At least $src is newer than $stamp" diff --git a/golang/cosmos/Makefile b/golang/cosmos/Makefile index c0db4e96ad5..1c561987607 100644 --- a/golang/cosmos/Makefile +++ b/golang/cosmos/Makefile @@ -77,11 +77,11 @@ compile-helper: go-mod-cache compile-libdaemon: go-mod-cache go build -v $(MOD_READONLY) $(BUILD_FLAGS) -buildmode=c-shared -o build/libagcosmosdaemon.so ./cmd/libdaemon/main.go -go-mod-cache: ../../go.sum +go-mod-cache: go.sum @echo "--> Download go modules to local cache" @go mod download -../../go.sum: ../../go.mod +go.sum: go.mod @echo "--> Ensure dependencies have not been modified" GO111MODULE=on go mod verify diff --git a/go.mod b/golang/cosmos/go.mod similarity index 97% rename from go.mod rename to golang/cosmos/go.mod index 6500f9f3156..03696a04b70 100644 --- a/go.mod +++ b/golang/cosmos/go.mod @@ -1,4 +1,4 @@ -module github.com/Agoric/agoric-sdk +module github.com/Agoric/agoric-sdk/golang/cosmos go 1.20 @@ -150,6 +150,6 @@ replace github.com/cosmos/cosmos-sdk => github.com/agoric-labs/cosmos-sdk v0.45. 
replace github.com/cosmos/gaia/v7 => github.com/Agoric/ag0/v7 v7.0.2-alpha.agoric.1 // For testing against a local cosmos-sdk or tendermint -// replace github.com/cosmos/cosmos-sdk => ../forks/cosmos-sdk +// replace github.com/cosmos/cosmos-sdk => ../../../forks/cosmos-sdk -// replace github.com/tendermint/tendermint => ../forks/tendermint +// replace github.com/tendermint/tendermint => ../../../forks/tendermint diff --git a/go.sum b/golang/cosmos/go.sum similarity index 100% rename from go.sum rename to golang/cosmos/go.sum diff --git a/packages/deployment/Dockerfile.sdk b/packages/deployment/Dockerfile.sdk index 3298108fdb0..190bd3ce121 100644 --- a/packages/deployment/Dockerfile.sdk +++ b/packages/deployment/Dockerfile.sdk @@ -3,7 +3,7 @@ FROM golang:1.20 as cosmos-go WORKDIR /usr/src/agoric-sdk/golang/cosmos -COPY go.mod go.sum ../../ +COPY golang/cosmos/go.mod golang/cosmos/go.sum ./ RUN go mod download COPY golang/cosmos ./ From a1d1666997c2f5b9c7bf14748f6d9603c0b3c5f9 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Mon, 5 Jun 2023 15:16:19 -0600 Subject: [PATCH 003/109] feat(agd): try harder to find cosmic-swingset --- golang/cosmos/cmd/agd/find_binary.go | 29 ++++++++++++++++++++------- golang/cosmos/cmd/agd/main.go | 5 ++--- golang/cosmos/package.json | 20 ++++-------------- packages/agoric-cli/src/helpers.js | 3 +-- packages/cosmic-swingset/package.json | 2 +- 5 files changed, 30 insertions(+), 29 deletions(-) diff --git a/golang/cosmos/cmd/agd/find_binary.go b/golang/cosmos/cmd/agd/find_binary.go index 6c23ed1fea4..336ee25a34c 100644 --- a/golang/cosmos/cmd/agd/find_binary.go +++ b/golang/cosmos/cmd/agd/find_binary.go @@ -8,14 +8,19 @@ import ( // FindBinaryNextToMe looks for binName next to the current executable. // It returns an absolute filename for binName, or an error. -func FindBinaryNextToMe(binName string) (string, error) { +func FindBinaryNextToMe(walkUp int, path... 
string) (string, error) { ex, err := os.Executable() if err != nil { return "", err } // Calculate the binary's filename. - bin := filepath.Join(filepath.Dir(ex), binName) + dir := ex + for i := 0; i <= walkUp; i++ { + dir = filepath.Dir(dir) + } + + bin := filepath.Join(append([]string{dir}, path...)...) // Take the absolute path. bin, err = filepath.Abs(bin) @@ -24,20 +29,30 @@ func FindBinaryNextToMe(binName string) (string, error) { } // Check that the binary exists. - if _, err := os.Stat(bin); err != nil { + if _, err = os.Stat(bin); err != nil { return "", err } + return bin, nil } -// FindBinary looks for binName in the following locations: +// FindCosmicSwingsetBinary looks for binName in the following locations: // 1. The executable's directory -// 2. The system PATH +// 2. Walking up to ../../cosmic-swingset/src/entrypoint.js +// 3. Walking up to ../../../packages/cosmic-swingset/src/entrypoint.js +// 4. The system PATH // // It returns the absolute filename for binName if it is found, otherwise an // error. -func FindBinary(binName string) (string, error) { - if binary, err := FindBinaryNextToMe(binName); err == nil { +func FindCosmicSwingsetBinary() (string, error) { + binName := "ag-chain-cosmos" + if binary, err := FindBinaryNextToMe(0, binName); err == nil { + return binary, nil + } + if binary, err := FindBinaryNextToMe(2, "cosmic-swingset", "src", "entrypoint.js"); err == nil { + return binary, nil + } + if binary, err := FindBinaryNextToMe(3, "packages", "cosmic-swingset", "src", "entrypoint.js"); err == nil { return binary, nil } diff --git a/golang/cosmos/cmd/agd/main.go b/golang/cosmos/cmd/agd/main.go index 7e89b61cb1a..21f3a0db1ce 100644 --- a/golang/cosmos/cmd/agd/main.go +++ b/golang/cosmos/cmd/agd/main.go @@ -17,13 +17,12 @@ func main() { args := []string{"ag-chain-cosmos", "--home", gaia.DefaultNodeHome} args = append(args, os.Args[1:]...) 
- logger.Info("Start chain delegating to JS executable", "args", args) - - binary, lookErr := FindBinary(args[0]) + binary, lookErr := FindCosmicSwingsetBinary() if lookErr != nil { panic(lookErr) } + logger.Info("Start chain delegating to JS executable", "binary", binary, "args", args) execErr := syscall.Exec(binary, args, os.Environ()) if execErr != nil { panic(execErr) diff --git a/golang/cosmos/package.json b/golang/cosmos/package.json index 14315ee24be..60676206c74 100644 --- a/golang/cosmos/package.json +++ b/golang/cosmos/package.json @@ -11,18 +11,18 @@ }, "scripts": { "test": "exit 0", + "build:all": "make", "build:gyp": "make compile-gyp", "build:gyp-debug": "make compile-gyp GYP_DEBUG=--debug", "test:xs": "exit 0", + "prepack": "git rev-parse --short HEAD > git-revision.txt && rm -rf build", + "postpack": "git clean -f git-revision.txt", "build": "exit 0", "lint-fix": "yarn lint:eslint --fix", "lint": "eslint '**/*.{cjs,js}'" }, "dependencies": { - "bindings": "^1.2.1" - }, - "devDependencies": { - "esm": "agoric-labs/esm#Agoric-built", + "bindings": "^1.2.1", "napi-thread-safe-callback": "0.0.6", "node-addon-api": "^1.7.1" }, @@ -32,18 +32,6 @@ "url": "https://github.com/Agoric/agoric-sdk/issues" }, "homepage": "https://github.com/Agoric/agoric-sdk/tree/HEAD/golang/cosmos", - "files": [ - "Makefile*", - "app", - "binding.gyp.in", - "cmd", - "daemon", - "proto", - "scripts", - "src", - "third_party", - "x" - ], "publishConfig": { "access": "public" } diff --git a/packages/agoric-cli/src/helpers.js b/packages/agoric-cli/src/helpers.js index db9abdd0043..cbb9904108a 100644 --- a/packages/agoric-cli/src/helpers.js +++ b/packages/agoric-cli/src/helpers.js @@ -13,8 +13,7 @@ export const getSDKBinaries = ({ return { agSolo: new URL(`${jsPfx}/solo/src/entrypoint.js`, myUrl).pathname, agSoloBuild: ['yarn', '--cwd', xsnap, `build:from-env`], - cosmosChain: new URL(`${jsPfx}/cosmic-swingset/bin/ag-chain-cosmos`, myUrl) - .pathname, + cosmosChain: new 
URL(`${goPfx}/cosmos/build/agd`, myUrl).pathname, cosmosChainBuild: cosmosBuild, cosmosClientBuild: cosmosBuild, cosmosHelper: new URL(`${goPfx}/cosmos/build/agd`, myUrl).pathname, diff --git a/packages/cosmic-swingset/package.json b/packages/cosmic-swingset/package.json index 4e804413047..d14cc7ab655 100644 --- a/packages/cosmic-swingset/package.json +++ b/packages/cosmic-swingset/package.json @@ -4,7 +4,7 @@ "description": "Agoric's Cosmos blockchain integration", "type": "module", "bin": { - "ag-chain-cosmos": "./src/entrypoint.js" + "ag-chain-cosmos": "src/entrypoint.js" }, "main": "src/chain-main.js", "repository": "https://github.com/Agoric/agoric-sdk", From 7740bab612ba9ae66d745497db4a90971416412d Mon Sep 17 00:00:00 2001 From: Raphael Salas Date: Tue, 16 May 2023 22:07:14 +0000 Subject: [PATCH 004/109] feat: allow passing tmux flags --- packages/deployment/upgrade-test/Makefile | 7 ++++++- .../upgrade-test/upgrade-test-scripts/bash_entrypoint.sh | 8 +++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/packages/deployment/upgrade-test/Makefile b/packages/deployment/upgrade-test/Makefile index 33e0b3199aa..187174bce05 100644 --- a/packages/deployment/upgrade-test/Makefile +++ b/packages/deployment/upgrade-test/Makefile @@ -4,6 +4,11 @@ ifdef TARGET buildTargetFlag = --target $(TARGET) dockerLabel = $(TARGET) endif +ifdef TMUX_CC + tmuxCC=1 +else + tmuxCC=0 +endif @echo buildTargetFlag: $(buildTargetFlag) local_sdk: @@ -18,5 +23,5 @@ build_test: docker build --build-arg BOOTSTRAP_MODE=test --progress=plain $(buildTargetFlag) -t $(REPOSITORY):$(dockerLabel) -f Dockerfile upgrade-test-scripts run: - docker run --rm -it -e "DEST=1" -p 26656:26656 -p 26657:26657 -p 1317:1317 --entrypoint "/usr/src/agoric-sdk/upgrade-test-scripts/start_to_to.sh" -v "$${PWD}:/workspace" $(REPOSITORY):$(dockerLabel) + docker run --rm -it -e "DEST=1" -e "TMUX_USE_CC=$(tmuxCC)" -p 26656:26656 -p 26657:26657 -p 1317:1317 --entrypoint 
"/usr/src/agoric-sdk/upgrade-test-scripts/start_to_to.sh" -v "$${PWD}:/workspace" $(REPOSITORY):$(dockerLabel) diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/bash_entrypoint.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/bash_entrypoint.sh index 06dbbdb5918..9f0161bccad 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/bash_entrypoint.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/bash_entrypoint.sh @@ -2,6 +2,12 @@ cd /usr/src/agoric-sdk/ || exit 1 tmux -V || apt install -y tmux -tmux \ +if [[ $TMUX_USE_CC == "1" ]]; then + TMUX_FLAGS="-CC -u" +else + TMUX_FLAGS="" +fi + +tmux $TMUX_FLAGS \ new-session 'SLOGFILE=slog.slog ./upgrade-test-scripts/start_to_to.sh' \; \ new-window 'bash -i' From b2a6d200424cfeed5c9ba29639c097ea224e9e74 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Mon, 5 Jun 2023 16:00:21 -0600 Subject: [PATCH 005/109] ci(restore-golang): adjust path to `**/go.sum` --- .github/actions/restore-golang/action.yml | 4 +++- .github/workflows/test-golang.yml | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/actions/restore-golang/action.yml b/.github/actions/restore-golang/action.yml index e2524c71056..4fc1ce9f4fd 100644 --- a/.github/actions/restore-golang/action.yml +++ b/.github/actions/restore-golang/action.yml @@ -18,6 +18,7 @@ runs: submodules: 'true' - uses: actions/setup-go@v4 with: + cache-dependency-path: golang/cosmos/go.sum go-version: ${{ inputs.go-version }} - uses: kenchan0130/actions-system-info@master id: system-info @@ -26,10 +27,11 @@ runs: uses: actions/cache@v3 with: path: ${{ env.GOPATH }}/pkg/mod - key: ${{ runner.os }}-${{ runner.arch }}-${{ steps.system-info.outputs.release }}-go-${{ inputs.go-version }}-built-${{ hashFiles('go.sum') }} + key: ${{ runner.os }}-${{ runner.arch }}-${{ steps.system-info.outputs.release }}-go-${{ inputs.go-version }}-built-${{ hashFiles('golang/**/go.sum') }} restore-keys: | ${{ runner.os }}-${{ runner.arch 
}}-${{ steps.system-info.outputs.release }}-go-${{ inputs.go-version }}-built- - name: go mod download + working-directory: ./golang/cosmos run: go mod download shell: bash if: steps.cache.outputs.cache-hit != 'true' diff --git a/.github/workflows/test-golang.yml b/.github/workflows/test-golang.yml index 5661c7a0657..c5cba644049 100644 --- a/.github/workflows/test-golang.yml +++ b/.github/workflows/test-golang.yml @@ -20,7 +20,7 @@ jobs: with: go-version: '1.20' - name: go test - run: cd golang && go test -coverprofile=coverage.txt -covermode=atomic ./... + run: cd golang/cosmos && go test -coverprofile=coverage.txt -covermode=atomic ./... - uses: ./.github/actions/post-test if: (success() || failure()) continue-on-error: true From 3df4c839e0dc49b6223716625536386afcc9b846 Mon Sep 17 00:00:00 2001 From: Raphael Salas Date: Wed, 17 May 2023 18:27:52 +0000 Subject: [PATCH 006/109] upgrade-test: tmux flag docs --- packages/deployment/upgrade-test/Readme.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/packages/deployment/upgrade-test/Readme.md b/packages/deployment/upgrade-test/Readme.md index 9a66a76f604..7a58b757611 100644 --- a/packages/deployment/upgrade-test/Readme.md +++ b/packages/deployment/upgrade-test/Readme.md @@ -26,6 +26,19 @@ make build make run ``` +This will start a container with tmux, with the first window `0` being chain logs `agd start` and the second and current window `1` being a bash shell. You can navigate using `bind-key+B N` (assuming `bind-key` is CTRL/CMD) and N is the window. For more shortcuts see [tmux shortcuts & cheatsheet](https://gist.github.com/MohamedAlaa/2961058#list-all-shortcuts). + +The container and chain will halt once you detach from the session. + +### Using tmux control mode + +If you use [iTerm you can use tmux with native integration](https://iterm2.com/documentation-tmux-integration.html), called control mode, which will make your tmux session appear as a physical window. 
Pass `TMUX_CC=1`: + +```shell +TMUX_CC=1 make run +``` + +### Troubleshooting If you get an error about port 26656 already in use, you have a local chain running on your OS. If you run into other problems, you might have a local `agoric-sdk:latest` that From b9ed037718fe1f5b3e1f876b233611059b4d7618 Mon Sep 17 00:00:00 2001 From: Raphael Salas Date: Thu, 18 May 2023 22:42:35 +0000 Subject: [PATCH 007/109] upgrade-test: isolate vaults-and-beyond env setup --- packages/deployment/upgrade-test/Dockerfile | 14 +-- .../agoric-upgrade-10/env_setup.sh | 86 +++++++++++++++++++ .../agoric-upgrade-11/env_setup.sh | 86 +++++++++++++++++++ .../upgrade-test-scripts/env_setup.sh | 82 ++---------------- 4 files changed, 188 insertions(+), 80 deletions(-) create mode 100644 packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh create mode 100644 packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh diff --git a/packages/deployment/upgrade-test/Dockerfile b/packages/deployment/upgrade-test/Dockerfile index 57468df48aa..a75c4f69cc1 100644 --- a/packages/deployment/upgrade-test/Dockerfile +++ b/packages/deployment/upgrade-test/Dockerfile @@ -7,18 +7,18 @@ ENV UPGRADE_TO=agoric-upgrade-8 THIS_NAME=agoric-upgrade-7-2 BOOTSTRAP_MODE=${BO RUN echo "${BOOTSTRAP_MODE}" RUN mkdir -p /usr/src/agoric-sdk/upgrade-test-scripts WORKDIR /usr/src/agoric-sdk/ -COPY ./*.sh ./upgrade-test-scripts/ +COPY ./start_ag0.sh ./upgrade-test-scripts/ +COPY ./bash_entrypoint.sh ./env_setup.sh ./start_to_to.sh ./upgrade-test-scripts/ SHELL ["/bin/bash", "-c"] RUN . 
./upgrade-test-scripts/start_ag0.sh -ARG BOOTSTRAP_MODE ## this is agoric-upgrade-8 aka pismoA FROM ghcr.io/agoric/agoric-sdk:29 as agoric-upgrade-8 ARG BOOTSTRAP_MODE ENV THIS_NAME=agoric-upgrade-8 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} WORKDIR /usr/src/agoric-sdk/ -COPY ./*.sh ./upgrade-test-scripts/ +COPY ./bash_entrypoint.sh ./env_setup.sh ./start_to_to.sh ./upgrade-test-scripts/ COPY ./${THIS_NAME} ./upgrade-test-scripts/${THIS_NAME}/ COPY --from=agoric-upgrade-7-2 /root/.agoric /root/.agoric RUN chmod +x ./upgrade-test-scripts/*.sh @@ -32,7 +32,7 @@ ARG BOOTSTRAP_MODE ENV THIS_NAME=agoric-upgrade-8-1 UPGRADE_TO=agoric-upgrade-9 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} WORKDIR /usr/src/agoric-sdk/ -COPY ./*.sh ./upgrade-test-scripts/ +COPY ./bash_entrypoint.sh ./env_setup.sh ./start_to_to.sh ./upgrade-test-scripts/ COPY ./${THIS_NAME} ./upgrade-test-scripts/${THIS_NAME}/ COPY --from=agoric-upgrade-8 /root/.agoric /root/.agoric RUN chmod +x ./upgrade-test-scripts/*.sh @@ -46,7 +46,7 @@ ARG BOOTSTRAP_MODE ENV THIS_NAME=agoric-upgrade-9 UPGRADE_TO=agoric-upgrade-10 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} WORKDIR /usr/src/agoric-sdk/ -COPY ./*.sh ./upgrade-test-scripts/ +COPY ./bash_entrypoint.sh ./env_setup.sh ./start_to_to.sh ./upgrade-test-scripts/ COPY ./${THIS_NAME} ./upgrade-test-scripts/${THIS_NAME}/ COPY --from=agoric-upgrade-8-1 /root/.agoric /root/.agoric WORKDIR /usr/src/agoric-sdk/ @@ -62,7 +62,7 @@ ARG BOOTSTRAP_MODE ENV THIS_NAME=agoric-upgrade-10 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} WORKDIR /usr/src/agoric-sdk/ -COPY ./*.sh ./upgrade-test-scripts/ +COPY ./bash_entrypoint.sh ./env_setup.sh ./start_to_to.sh ./upgrade-test-scripts/ COPY ./${THIS_NAME} ./upgrade-test-scripts/${THIS_NAME}/ COPY --from=agoric-upgrade-9 /root/.agoric /root/.agoric RUN chmod +x ./upgrade-test-scripts/*.sh @@ -77,7 +77,7 @@ ENV THIS_NAME=agoric-upgrade-11 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} # this boot doesn't need an upgrade WORKDIR /usr/src/agoric-sdk/ -COPY ./*.sh ./upgrade-test-scripts/ +COPY 
./bash_entrypoint.sh ./env_setup.sh ./start_to_to.sh ./upgrade-test-scripts/ COPY ./${THIS_NAME} ./upgrade-test-scripts/${THIS_NAME}/ COPY --from=agoric-upgrade-10 /root/.agoric /root/.agoric RUN apt install -y tmux diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh new file mode 100644 index 00000000000..28fd8a876e1 --- /dev/null +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh @@ -0,0 +1,86 @@ +#!/bin/bash + +# agoric-upgrade-10 specific env here... +export USER2ADDR=$($binary keys show user2 -a --keyring-backend="test" 2> /dev/null) + +printKeys() { + echo "========== GOVERNANCE KEYS ==========" + echo "gov1: $GOV1ADDR" + cat ~/.agoric/gov1.key || true + echo "gov2: $GOV2ADDR" + cat ~/.agoric/gov2.key || true + echo "gov3: $GOV3ADDR" + cat ~/.agoric/gov3.key || true + echo "validator: $VALIDATORADDR" + cat ~/.agoric/validator.key || true + echo "user1: $USER1ADDR" + cat ~/.agoric/user1.key || true + echo "user2: $USER2ADDR" + cat ~/.agoric/user2.key || true + echo "========== GOVERNANCE KEYS ==========" +} + +pushPrice () { + echo ACTIONS pushPrice $1 + newPrice="${1:-10.00}" + for oracleNum in {1..2}; do + if [[ ! -e "$HOME/.agoric/lastOracle" ]]; then + echo "$GOV1ADDR" > "$HOME/.agoric/lastOracle" + fi + + lastOracle=$(cat "$HOME/.agoric/lastOracle") + nextOracle="$GOV1ADDR" + if [[ "$lastOracle" == "$GOV1ADDR" ]]; then + nextOracle="$GOV2ADDR" + fi + echo "Pushing Price from oracle $nextOracle" + + oid="${nextOracle}_ORACLE" + offer=$(mktemp -t pushPrice.XXX) + agops oracle pushPriceRound --price "$newPrice" --oracleAdminAcceptOfferId "${!oid}" >|"$offer" + sleep 1 + timeout --preserve-status 15 yarn run --silent agops perf satisfaction --from $nextOracle --executeOffer "$offer" --keyring-backend test + if [ $? -ne 0 ]; then + echo "WARNING: pushPrice for $nextOracle failed!" 
+ fi + echo "$nextOracle" > "$HOME/.agoric/lastOracle" + done +} + + +# variant of pushPrice() that figures out which oracle to send from +# WIP because it doesn't always work +pushPriceOnce () { + echo ACTIONS pushPrice $1 + newPrice="${1:-10.00}" + timeout 3 agoric follow -lF :published.priceFeed.ATOM-USD_price_feed.latestRound -ojson > "$HOME/.agoric/latestRound-ATOM.json" + + lastStartedBy=$(jq -r .startedBy "$HOME/.agoric/latestRound-ATOM.json" || echo null) + echo lastStartedBy $lastStartedBy + nextOracle="ERROR" + # cycle to next among oracles (first of the two governance accounts) + case $lastStartedBy in + "$GOV1ADDR") nextOracle=$GOV2ADDR;; + "$GOV2ADDR") nextOracle=$GOV1ADDR;; + *) + echo last price was pushed by a different account, using GOV1 + nextOracle=$GOV1ADDR + ;; + esac + echo nextOracle $nextOracle + + adminOfferId="${nextOracle}_ORACLE" + + echo "Pushing Price from oracle $nextOracle with offer $adminOfferId" + + offer=$(mktemp -t pushPrice.XXX) + agops oracle pushPriceRound --price "$newPrice" --oracleAdminAcceptOfferId "${adminOfferId}" >|"$offer" + cat "$offer" + sleep 1 + timeout --preserve-status 15 yarn run --silent agops perf satisfaction --from $nextOracle --executeOffer "$offer" --keyring-backend test + if [ $? -eq 0 ]; then + echo SUCCESS + else + echo "ERROR: pushPrice failed (using $nextOracle)" + fi +} \ No newline at end of file diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh new file mode 100644 index 00000000000..11e8e98ba3f --- /dev/null +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh @@ -0,0 +1,86 @@ +#!/bin/bash + +# agoric-upgrade-11 specific env here... 
+export USER2ADDR=$($binary keys show user2 -a --keyring-backend="test" 2> /dev/null) + +printKeys() { + echo "========== GOVERNANCE KEYS ==========" + echo "gov1: $GOV1ADDR" + cat ~/.agoric/gov1.key || true + echo "gov2: $GOV2ADDR" + cat ~/.agoric/gov2.key || true + echo "gov3: $GOV3ADDR" + cat ~/.agoric/gov3.key || true + echo "validator: $VALIDATORADDR" + cat ~/.agoric/validator.key || true + echo "user1: $USER1ADDR" + cat ~/.agoric/user1.key || true + echo "user2: $USER2ADDR" + cat ~/.agoric/user2.key || true + echo "========== GOVERNANCE KEYS ==========" +} + +pushPrice () { + echo ACTIONS pushPrice $1 + newPrice="${1:-10.00}" + for oracleNum in {1..2}; do + if [[ ! -e "$HOME/.agoric/lastOracle" ]]; then + echo "$GOV1ADDR" > "$HOME/.agoric/lastOracle" + fi + + lastOracle=$(cat "$HOME/.agoric/lastOracle") + nextOracle="$GOV1ADDR" + if [[ "$lastOracle" == "$GOV1ADDR" ]]; then + nextOracle="$GOV2ADDR" + fi + echo "Pushing Price from oracle $nextOracle" + + oid="${nextOracle}_ORACLE" + offer=$(mktemp -t pushPrice.XXX) + agops oracle pushPriceRound --price "$newPrice" --oracleAdminAcceptOfferId "${!oid}" >|"$offer" + sleep 1 + timeout --preserve-status 15 yarn run --silent agops perf satisfaction --from $nextOracle --executeOffer "$offer" --keyring-backend test + if [ $? -ne 0 ]; then + echo "WARNING: pushPrice for $nextOracle failed!" 
+ fi + echo "$nextOracle" > "$HOME/.agoric/lastOracle" + done +} + + +# variant of pushPrice() that figures out which oracle to send from +# WIP because it doesn't always work +pushPriceOnce () { + echo ACTIONS pushPrice $1 + newPrice="${1:-10.00}" + timeout 3 agoric follow -lF :published.priceFeed.ATOM-USD_price_feed.latestRound -ojson > "$HOME/.agoric/latestRound-ATOM.json" + + lastStartedBy=$(jq -r .startedBy "$HOME/.agoric/latestRound-ATOM.json" || echo null) + echo lastStartedBy $lastStartedBy + nextOracle="ERROR" + # cycle to next among oracles (first of the two governance accounts) + case $lastStartedBy in + "$GOV1ADDR") nextOracle=$GOV2ADDR;; + "$GOV2ADDR") nextOracle=$GOV1ADDR;; + *) + echo last price was pushed by a different account, using GOV1 + nextOracle=$GOV1ADDR + ;; + esac + echo nextOracle $nextOracle + + adminOfferId="${nextOracle}_ORACLE" + + echo "Pushing Price from oracle $nextOracle with offer $adminOfferId" + + offer=$(mktemp -t pushPrice.XXX) + agops oracle pushPriceRound --price "$newPrice" --oracleAdminAcceptOfferId "${adminOfferId}" >|"$offer" + cat "$offer" + sleep 1 + timeout --preserve-status 15 yarn run --silent agops perf satisfaction --from $nextOracle --executeOffer "$offer" --keyring-backend test + if [ $? 
-eq 0 ]; then + echo SUCCESS + else + echo "ERROR: pushPrice failed (using $nextOracle)" + fi +} \ No newline at end of file diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/env_setup.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/env_setup.sh index 21d91a9fd7a..7a691aecb7c 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/env_setup.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/env_setup.sh @@ -23,10 +23,6 @@ export GOV3ADDR=$($binary keys show gov3 -a --keyring-backend="test") export VALIDATORADDR=$($binary keys show validator -a --keyring-backend="test") export USER1ADDR=$($binary keys show user1 -a --keyring-backend="test") -if [[ $THIS_NAME == "agoric-upgrade-10" || $THIS_NAME == "agoric-upgrade-11" ]]; then - export USER2ADDR=$($binary keys show user2 -a --keyring-backend="test" 2> /dev/null) -fi - if [[ "$binary" == "agd" ]]; then # Support testnet addresses sed -i "s/agoric1ldmtatp24qlllgxmrsjzcpe20fvlkp448zcuce/$GOV1ADDR/g" /usr/src/agoric-sdk/packages/vats/*.json @@ -211,78 +207,9 @@ printKeys() { cat ~/.agoric/validator.key || true echo "user1: $USER1ADDR" cat ~/.agoric/user1.key || true - if [[ $THIS_NAME == "agoric-upgrade-10" || $THIS_NAME == "agoric-upgrade-11" ]]; then - cat ~/.agoric/user2.key || true - fi echo "========== GOVERNANCE KEYS ==========" } -echo ENV_SETUP finished - -pushPrice () { - echo ACTIONS pushPrice $1 - newPrice="${1:-10.00}" - for oracleNum in {1..2}; do - if [[ ! 
-e "$HOME/.agoric/lastOracle" ]]; then - echo "$GOV1ADDR" > "$HOME/.agoric/lastOracle" - fi - - lastOracle=$(cat "$HOME/.agoric/lastOracle") - nextOracle="$GOV1ADDR" - if [[ "$lastOracle" == "$GOV1ADDR" ]]; then - nextOracle="$GOV2ADDR" - fi - echo "Pushing Price from oracle $nextOracle" - - oid="${nextOracle}_ORACLE" - offer=$(mktemp -t pushPrice.XXX) - agops oracle pushPriceRound --price "$newPrice" --oracleAdminAcceptOfferId "${!oid}" >|"$offer" - sleep 1 - timeout --preserve-status 15 yarn run --silent agops perf satisfaction --from $nextOracle --executeOffer "$offer" --keyring-backend test - if [ $? -ne 0 ]; then - echo "WARNING: pushPrice for $nextOracle failed!" - fi - echo "$nextOracle" > "$HOME/.agoric/lastOracle" - done -} - - -# variant of pushPrice() that figures out which oracle to send from -# WIP because it doesn't always work -pushPriceOnce () { - echo ACTIONS pushPrice $1 - newPrice="${1:-10.00}" - timeout 3 agoric follow -lF :published.priceFeed.ATOM-USD_price_feed.latestRound -ojson > "$HOME/.agoric/latestRound-ATOM.json" - - lastStartedBy=$(jq -r .startedBy "$HOME/.agoric/latestRound-ATOM.json" || echo null) - echo lastStartedBy $lastStartedBy - nextOracle="ERROR" - # cycle to next among oracles (first of the two governance accounts) - case $lastStartedBy in - "$GOV1ADDR") nextOracle=$GOV2ADDR;; - "$GOV2ADDR") nextOracle=$GOV1ADDR;; - *) - echo last price was pushed by a different account, using GOV1 - nextOracle=$GOV1ADDR - ;; - esac - echo nextOracle $nextOracle - - adminOfferId="${nextOracle}_ORACLE" - - echo "Pushing Price from oracle $nextOracle with offer $adminOfferId" - - offer=$(mktemp -t pushPrice.XXX) - agops oracle pushPriceRound --price "$newPrice" --oracleAdminAcceptOfferId "${adminOfferId}" >|"$offer" - cat "$offer" - sleep 1 - timeout --preserve-status 15 yarn run --silent agops perf satisfaction --from $nextOracle --executeOffer "$offer" --keyring-backend test - if [ $? 
-eq 0 ]; then - echo SUCCESS - else - echo "ERROR: pushPrice failed (using $nextOracle)" - fi -} export USDC_DENOM="ibc/toyusdc" # Recent transfer to Emerynet @@ -293,3 +220,12 @@ if [[ "$BOOTSTRAP_MODE" == "main" ]]; then export ATOM_DENOM="ibc/BA313C4A19DFBF943586C0387E6B11286F9E416B4DD27574E6909CABE0E342FA" export PSM_PAIR="IST.USDC_axl" fi + +# additional env specific to a version +if test -f ./upgrade-test-scripts/$THIS_NAME/env_setup.sh; then + echo ENV_SETUP found $THIS_NAME specific env, importing... + . ./upgrade-test-scripts/$THIS_NAME/env_setup.sh + echo ENV_SETUP imported $THIS_NAME specific env +fi + +echo ENV_SETUP finished From 332f1c0c5b7a2e4d69c2434fa3f27c3d38011c2b Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Mon, 5 Jun 2023 16:28:16 -0600 Subject: [PATCH 008/109] ci(golangci-lint): implement depguard fix and adjust paths --- .github/workflows/golangci-lint.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/golangci-lint.yml b/.github/workflows/golangci-lint.yml index b59eaa15b37..9cb9c995468 100644 --- a/.github/workflows/golangci-lint.yml +++ b/.github/workflows/golangci-lint.yml @@ -24,15 +24,15 @@ jobs: go-version: '>=1.20' cache: false check-latest: true + # https://github.com/golangci/golangci-lint/issues/3862#issuecomment-1572973588 + - run: echo "GOROOT=$(go env GOROOT)" >> $GITHUB_ENV - name: golangci-lint uses: golangci/golangci-lint-action@v3 with: - # golangci-lint version and command line arguments - # v1.52.3 has a bug that causes it to fail depguard with: - # level=error msg="[linter] depguard: create analyzer: - # couldn't expand $gostd: could not read GOROOT directory: - # open src: no such file or directory\nmust have an Allow and/or Deny package list" - version: 'v1.52.2' # FIXME: change back to latest when it is fixed + version: latest args: --timeout=3m # for pull requests, show only new issues - only-new-issues: true + # Too bad it's incompatible with working-directory. 
+ # only-new-issues: true + only-new-issues: false + working-directory: ./golang/cosmos From 07c6cf20e622b9dfeaaf0187eae1faf6bf5233e8 Mon Sep 17 00:00:00 2001 From: Raphael Salas Date: Mon, 5 Jun 2023 11:44:54 -0400 Subject: [PATCH 009/109] Update Readme.md --- packages/deployment/upgrade-test/Readme.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/deployment/upgrade-test/Readme.md b/packages/deployment/upgrade-test/Readme.md index 7a58b757611..3c6176a5f9e 100644 --- a/packages/deployment/upgrade-test/Readme.md +++ b/packages/deployment/upgrade-test/Readme.md @@ -38,6 +38,8 @@ If you use [iTerm you can use tmux with native integration](https://iterm2.com/d TMUX_CC=1 make run ``` +**Note:** If your terminal does not support control mode, do not use this. It will show raw control codes, garbling your terminal. + ### Troubleshooting If you get an error about port 26656 already in use, you have a local chain running on your OS. From d90563a570e981ccfd9fcd8e0ac9b7aa2e0fc9ce Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Mon, 5 Jun 2023 18:21:21 -0600 Subject: [PATCH 010/109] ci(deployment): fix paths for local validator instance --- packages/deployment/docker/ag-setup-cosmos | 1 + packages/deployment/scripts/integration-test.sh | 1 + 2 files changed, 2 insertions(+) diff --git a/packages/deployment/docker/ag-setup-cosmos b/packages/deployment/docker/ag-setup-cosmos index 8bf3694c0e3..0f2466ed778 100755 --- a/packages/deployment/docker/ag-setup-cosmos +++ b/packages/deployment/docker/ag-setup-cosmos @@ -25,6 +25,7 @@ exec docker run --rm $TTY $FLAGS \ --volume=ag-setup-cosmos-chains:/data/chains \ --volume=ag-chain-cosmos-state:/root/.ag-chain-cosmos \ --volume=/var/run/docker.sock:/var/run/docker.sock \ + --env AGD_HOME=/root/.ag-chain-cosmos \ --env AG_SETUP_COSMOS_NAME=$NETWORK_NAME \ --env AG_SETUP_COSMOS_HOME=$SETUP_HOME \ --env AG_SETUP_COSMOS_BACKEND=$AG_SETUP_COSMOS_BACKEND \ diff --git a/packages/deployment/scripts/integration-test.sh 
b/packages/deployment/scripts/integration-test.sh index 542322aa13b..dbac085d216 100755 --- a/packages/deployment/scripts/integration-test.sh +++ b/packages/deployment/scripts/integration-test.sh @@ -54,6 +54,7 @@ then "$AG_SETUP_COSMOS_HOME/faucet-helper.sh" add-egress loadgen "$SOLO_ADDR" SLOGSENDER=@agoric/telemetry/src/otel-trace.js SOLO_SLOGSENDER= \ SLOGSENDER_FAIL_ON_ERROR=1 SLOGSENDER_AGENT=process \ + AG_CHAIN_COSMOS_HOME=$HOME/.agoric \ SDK_BUILD=0 MUST_USE_PUBLISH_BUNDLE=1 SDK_SRC=/usr/src/agoric-sdk OUTPUT_DIR="$RESULTSDIR" ./start.sh \ --no-stage.save-storage \ --stages=3 --stage.duration=10 --stage.loadgen.cycles=4 \ From d19e630fd64fbd122a3116d80109a8b47dc1e3ae Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Tue, 13 Jun 2023 12:59:43 -0600 Subject: [PATCH 011/109] build(xsnap): rebuild on postinstall --- packages/xsnap/package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/xsnap/package.json b/packages/xsnap/package.json index 4cc651c1395..9bd066e8339 100644 --- a/packages/xsnap/package.json +++ b/packages/xsnap/package.json @@ -16,6 +16,7 @@ "build:env": "if git status >/dev/null 2>&1; then node src/build.js --show-env > build.env; fi", "build:from-env": "{ cat build.env; echo node src/build.js; } | xargs env", "build": "yarn build:bin && yarn build:env", + "postinstall": "yarn build:from-env", "clean": "rm -rf xsnap-native/xsnap/build", "lint": "run-s --continue-on-error lint:*", "lint:js": "eslint 'src/**/*.js' 'test/**/*.js' api.js", From 22a675e6a994d2b2e12e5f37bfc32987f3c33e96 Mon Sep 17 00:00:00 2001 From: Brian Warner Date: Fri, 2 Jun 2023 14:34:43 -0700 Subject: [PATCH 012/109] chore(bin/agd): get expected xsnap version from repoconfig.sh instead of using a baked-in constant. Thanks to mfig for the suggestion. 
--- bin/agd | 13 +++++++------ repoconfig.sh | 1 + 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/bin/agd b/bin/agd index 7ffc9198170..e6a922b3476 100755 --- a/bin/agd +++ b/bin/agd @@ -184,11 +184,6 @@ fi } ) -if $BUILD_ONLY; then - echo "Build complete." 1>&2 - exit 0 -fi - # the xsnap binary lives in a platform-specific directory unameOut="$(uname -s)" case "${unameOut}" in @@ -196,9 +191,15 @@ case "${unameOut}" in Darwin*) platform=mac;; *) platform=win;; esac + # check the xsnap version against our baked-in notion of what version we should be using xsnap_version=$("${thisdir}/../packages/xsnap/xsnap-native/xsnap/build/bin/${platform}/release/xsnap-worker" -n) -[[ "${xsnap_version}" == "agoric-upgrade-10" ]] || fatal "xsnap out of date" +[[ "${xsnap_version}" == "${XSNAP_VERSION}" ]] || fatal "xsnap version mismatch; expected ${XSNAP_VERSION}, got ${xsnap_version}" + +if $BUILD_ONLY; then + echo "Build complete." 1>&2 + exit 0 +fi # Run the built Cosmos daemon. # shellcheck disable=SC2031 diff --git a/repoconfig.sh b/repoconfig.sh index ab34b7243fc..755ca3d1b1b 100644 --- a/repoconfig.sh +++ b/repoconfig.sh @@ -4,6 +4,7 @@ NODEJS_VERSION=v16 GOLANG_VERSION=1.20.3 GOLANG_DIR=golang/cosmos GOLANG_DAEMON=$GOLANG_DIR/build/agd +XSNAP_VERSION=agoric-upgrade-10 # Args are major, minor and patch version numbers function golang_version_check() { From 8b61b0ec9374c11861b3a47be9cb4008def93a6a Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Sun, 18 Jun 2023 15:11:14 -0600 Subject: [PATCH 013/109] build(deployment): use `bullseye` until `bookworm` bakes a bit --- packages/deployment/Dockerfile | 2 +- packages/deployment/Dockerfile.sdk | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/deployment/Dockerfile b/packages/deployment/Dockerfile index 75925e15b8c..f490a11c58c 100644 --- a/packages/deployment/Dockerfile +++ b/packages/deployment/Dockerfile @@ -4,7 +4,7 @@ ARG REGISTRY=ghcr.io # FIXME: Journalbeat compilation is 
currently broken, but non-essential. # Removed from the build. -# FROM golang:1.20 AS go-build +# FROM golang:1.20-bullseye AS go-build # WORKDIR /usr/src/journalbeat # RUN apt-get update -y && apt-get install -y libsystemd-dev diff --git a/packages/deployment/Dockerfile.sdk b/packages/deployment/Dockerfile.sdk index f2db099e544..6f472bf2afa 100644 --- a/packages/deployment/Dockerfile.sdk +++ b/packages/deployment/Dockerfile.sdk @@ -1,6 +1,6 @@ ########################### # The golang build container -FROM golang:1.20 as cosmos-go +FROM golang:1.20-bullseye as cosmos-go WORKDIR /usr/src/agoric-sdk/golang/cosmos COPY golang/cosmos/go.mod golang/cosmos/go.sum ./ @@ -16,7 +16,7 @@ RUN make GIT_COMMIT="$GIT_COMMIT" GIT_REVISION="$GIT_REVISION" MOD_READONLY= com # OTEL fetch # from https://github.com/open-telemetry/opentelemetry-collector-releases/releases -FROM node:lts AS otel +FROM node:lts-bullseye AS otel ARG OTEL_VERSION=0.48.0 ARG OTEL_HASH_arm64=846852f4c34f6e494abe202402fdf1d17e2ec3c7a7f96985b6011126ae553249 @@ -32,7 +32,7 @@ RUN set -eux; \ ############################### # The js build container -FROM node:lts AS build-js +FROM node:lts-bullseye AS build-js # When changing/adding entries here, make sure to search the whole project for # `@@AGORIC_DOCKER_SUBMODULES@@` @@ -68,7 +68,7 @@ RUN rm -rf packages/xsnap/moddable ############################### # The install container. -FROM node:lts AS install +FROM node:lts-bullseye AS install # Install some conveniences. 
RUN apt-get --allow-releaseinfo-change update && apt-get install -y vim jq less && apt-get clean -y From 208cfa1c908d75d5292b15ba5cfc666296f2667e Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Tue, 13 Jun 2023 22:12:13 -0600 Subject: [PATCH 014/109] build(cosmos): clarify `install_name_tool` postbuild name --- golang/cosmos/binding.gyp.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/golang/cosmos/binding.gyp.in b/golang/cosmos/binding.gyp.in index 84644a1ca0f..1b95d034276 100644 --- a/golang/cosmos/binding.gyp.in +++ b/golang/cosmos/binding.gyp.in @@ -35,7 +35,7 @@ ], 'postbuilds': [ { - 'postbuild_name': 'Add prefix @rpath/<(target_lib)', + 'postbuild_name': 'Adding @rpath prefix to <(target_lib)', 'action': [ 'install_name_tool', '-change', From c631bc54780acca2ab84ae88065ac31de35b0049 Mon Sep 17 00:00:00 2001 From: Dan Connolly Date: Tue, 13 Jun 2023 11:55:29 -0500 Subject: [PATCH 015/109] test: DeliverInbound from un-provisioned account is discarded --- .../agoric-upgrade-10/actions.sh | 3 +++ .../agoric-upgrade-10/env_setup.sh | 24 ++++++++++++++++++- .../agoric-upgrade-10/test.sh | 5 ++++ 3 files changed, 31 insertions(+), 1 deletion(-) diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/actions.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/actions.sh index 75a27d1de9c..1654578be83 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/actions.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/actions.sh @@ -10,6 +10,9 @@ set -x # agoric wallet show --from $GOV1ADDR waitForBlock 20 +# user1 has no mailbox provisioned; later we test that this was discarded +submitDeliverInbound user1 + # provision a new user wallet agd keys add user2 --keyring-backend=test 2>&1 | tee "$HOME/.agoric/user2.out" diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh 
b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh index 28fd8a876e1..298875e91e6 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh @@ -83,4 +83,26 @@ pushPriceOnce () { else echo "ERROR: pushPrice failed (using $nextOracle)" fi -} \ No newline at end of file +} + +# submit a DeliverInbound transaction +# +# see {agoric.swingset.MsgDeliverInbound} in swingset/msgs.proto +# https://github.com/Agoric/agoric-sdk/blob/5cc5ec8836dcd0c6e11b10799966b6e74601295d/golang/cosmos/proto/agoric/swingset/msgs.proto#L23 +submitDeliverInbound() { + sender="${1:-user1}" + + # ag-solo is a client that sends DeliverInbound transactions using a golang client + # @see {connectToChain} in chain-cosmos-sdk.js + # runHelper + # https://github.com/Agoric/agoric-sdk/blob/5cc5ec8836dcd0c6e11b10799966b6e74601295d/packages/solo/src/chain-cosmos-sdk.js + + # The payload is JSON.stringify([messages, highestAck]) + # https://github.com/Agoric/agoric-sdk/blob/5cc5ec8836dcd0c6e11b10799966b6e74601295d/packages/solo/src/chain-cosmos-sdk.js#L625 + # for example, this json was captured from a running `agoric start local-solo` + json='[[[1,"1:0:deliver:ro+1:rp-44;#[\"getConfiguration\",[]]"]],0]' + + agd tx swingset deliver "${json}" \ + --chain-id="$CHAINID" -ojson --yes \ + --from="$sender" --keyring-backend=test -b block +} diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/test.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/test.sh index 34b83c16ad5..3f756b3ee77 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/test.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/test.sh @@ -2,6 +2,11 @@ . 
./upgrade-test-scripts/env_setup.sh +# DeliverInbound from un-provisioned account is discarded +# Note: sending to a provisioned account resulted in an .outbox of +# [[1,"1:1:resolve:fulfill:rp+44:ro-20;#\"$0.Alleged: notifier\""]] +test_val $(agd query swingset mailbox $USER1ADDR -o json | jq '.value |fromjson |.outbox') '[]' "DeliverInbound (getConfiguration) is discarded" + # provision pool has right balance test_val $(agd query bank balances agoric1megzytg65cyrgzs6fvzxgrcqvwwl7ugpt62346 -o json | jq -r '.balances | first | .amount ') "18750000" From c3a4e0fc3c618baa3dcaf5aad06ac5d403603386 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Fri, 9 Jun 2023 20:59:35 -0600 Subject: [PATCH 016/109] test(cosmos): demonstrate Golang error frame divergence --- golang/cosmos/cmd/libdaemon/main_test.go | 29 ++++++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 golang/cosmos/cmd/libdaemon/main_test.go diff --git a/golang/cosmos/cmd/libdaemon/main_test.go b/golang/cosmos/cmd/libdaemon/main_test.go new file mode 100644 index 00000000000..4257bcd553b --- /dev/null +++ b/golang/cosmos/cmd/libdaemon/main_test.go @@ -0,0 +1,29 @@ +package main_test + +import ( + "fmt" + "strings" + "testing" + + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" +) + +func TestErrorStackTraces(t *testing.T) { + err := sdkerrors.Wrapf(sdkerrors.ErrInsufficientFee, "my error %d", 123) + expected := "fail: my error 123: insufficient fee" + + // Expected message only (what we want). + stringified := fmt.Errorf("fail: %s", err) + got := stringified.Error() + if got != expected { + t.Fatalf("stringified.Error() %q should be %q", got, expected) + } + + // Expected stack frame (though undesirable). 
+ wrapped := fmt.Errorf("fail: %w", err) + got = wrapped.Error() + expectedAndStack := expected + " [" + if !strings.HasPrefix(got, expectedAndStack) { + t.Fatalf("wrapped.Error() %q should start with %q", got, expectedAndStack) + } +} From 355f205b4ff7cff5063e72b35ed79e206821514b Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Thu, 15 Jun 2023 11:52:25 -0600 Subject: [PATCH 017/109] fix(deployment): map Debian `bookworm` to Ubuntu `jammy` --- packages/deployment/scripts/install-deps.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/deployment/scripts/install-deps.sh b/packages/deployment/scripts/install-deps.sh index c63b88853b8..7221748619c 100755 --- a/packages/deployment/scripts/install-deps.sh +++ b/packages/deployment/scripts/install-deps.sh @@ -46,6 +46,9 @@ case $VERSION_CODENAME in bullseye) VERSION_CODENAME=focal ;; + bookworm) + VERSION_CODENAME=jammy + ;; esac # Install Ansible. From ee431121e3f93406896f8a9e7d949fbf1427c44e Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Thu, 15 Jun 2023 14:40:41 -0600 Subject: [PATCH 018/109] build!: remove deprecated `ag-cosmos-helper` --- golang/cosmos/Makefile | 14 ++++---------- golang/cosmos/cmd/helper/main.go | 21 --------------------- packages/cosmic-swingset/Makefile | 6 +----- packages/deployment/Dockerfile.sdk | 1 - 4 files changed, 5 insertions(+), 37 deletions(-) delete mode 100644 golang/cosmos/cmd/helper/main.go diff --git a/golang/cosmos/Makefile b/golang/cosmos/Makefile index 1c561987607..a5c9da82b07 100644 --- a/golang/cosmos/Makefile +++ b/golang/cosmos/Makefile @@ -38,15 +38,12 @@ ldflags += -compressdwarf=false gcflags += -N -l endif -ldflags_helper = $(ldflags) \ - -X github.com/cosmos/cosmos-sdk/version.AppName=ag-cosmos-helper BUILD_FLAGS := -tags "$(build_tags)" -gcflags '$(gcflags)' -ldflags '$(ldflags)' -BUILD_FLAGS_HELPER := -buildmode=exe -tags "$(build_tags)" -gcflags '$(gcflags)' -ldflags '$(ldflags_helper)' all: compile-chain -compile-chain: compile-agd compile-helper 
compile-daemon -compile-go: compile-agd compile-helper compile-libdaemon +compile-chain: compile-agd compile-daemon +compile-go: compile-agd compile-libdaemon compile-node: node-compile-gyp compile-daemon: compile-libdaemon node-compile-gyp @@ -60,10 +57,10 @@ node-compile-gyp: fi compile-agd: go-mod-cache - go build -v $(MOD_READONLY) $(BUILD_FLAGS_HELPER) -o build/agd ./cmd/agd + go build -v $(MOD_READONLY) $(BUILD_FLAGS) -buildmode=exe -o build/agd ./cmd/agd install-agd: go-mod-cache - go install -v $(MOD_READONLY) $(BUILD_FLAGS_HELPER) ./cmd/agd + go install -v $(MOD_READONLY) $(BUILD_FLAGS) -buildmode=exe ./cmd/agd # Only run from the package.json build:gyp script. compile-gyp: @@ -71,9 +68,6 @@ compile-gyp: node-gyp configure build $(GYP_DEBUG) || { status=$$?; rm -f binding.gyp; exit $$status; } rm -f binding.gyp -compile-helper: go-mod-cache - go build -v $(MOD_READONLY) $(BUILD_FLAGS_HELPER) -o build/ag-cosmos-helper ./cmd/helper - compile-libdaemon: go-mod-cache go build -v $(MOD_READONLY) $(BUILD_FLAGS) -buildmode=c-shared -o build/libagcosmosdaemon.so ./cmd/libdaemon/main.go diff --git a/golang/cosmos/cmd/helper/main.go b/golang/cosmos/cmd/helper/main.go deleted file mode 100644 index df68444e03f..00000000000 --- a/golang/cosmos/cmd/helper/main.go +++ /dev/null @@ -1,21 +0,0 @@ -package main - -import ( - "os" - "path/filepath" - - gaia "github.com/Agoric/agoric-sdk/golang/cosmos/app" - "github.com/Agoric/agoric-sdk/golang/cosmos/daemon" - daemoncmd "github.com/Agoric/agoric-sdk/golang/cosmos/daemon/cmd" -) - -func main() { - userHomeDir, err := os.UserHomeDir() - if err != nil { - panic(err) - } - - gaia.DefaultNodeHome = filepath.Join(userHomeDir, ".ag-cosmos-helper") - daemoncmd.AppName = "ag-cosmos-helper" - daemon.Run() -} diff --git a/packages/cosmic-swingset/Makefile b/packages/cosmic-swingset/Makefile index 7b8192ce8fa..be464ea28f6 100644 --- a/packages/cosmic-swingset/Makefile +++ b/packages/cosmic-swingset/Makefile @@ -81,7 +81,7 @@ all: 
build-chain install-nobuild client: build-helper install-nobuild -install-nobuild: install-local install-agd install-helper +install-nobuild: install-local install-agd install: all install-nobuild @@ -363,10 +363,6 @@ install-agd: mkdir -p "$(BIN)" ln -sf "$(SDK_ROOT)/bin/agd" "$(BIN)/agd" -install-helper: - mkdir -p "$(BIN)" - ln -sf "$(SDK_ROOT)/bin/agd" "$(BIN)/ag-cosmos-helper" - install-local: mkdir -p "$(BIN)" ln -sf "$(SDK_ROOT)/bin/agd" "$(BIN)/ag-chain-cosmos" diff --git a/packages/deployment/Dockerfile.sdk b/packages/deployment/Dockerfile.sdk index 190bd3ce121..f2db099e544 100644 --- a/packages/deployment/Dockerfile.sdk +++ b/packages/deployment/Dockerfile.sdk @@ -77,7 +77,6 @@ WORKDIR /usr/src COPY --from=build-js /usr/src/agoric-sdk agoric-sdk COPY --from=otel /otelcol-contrib /usr/local/bin/ RUN ln -s /usr/src/agoric-sdk/bin/agd /usr/local/bin/ -RUN ln -s /usr/src/agoric-sdk/bin/agd /usr/local/bin/ag-cosmos-helper RUN ln -s /usr/src/agoric-sdk/packages/cosmic-swingset/bin/ag-chain-cosmos /usr/local/bin/ RUN ln -s /usr/src/agoric-sdk/packages/solo/bin/ag-solo /usr/local/bin/ RUN ln -s /usr/src/agoric-sdk/packages/agoric-cli/bin/agoric /usr/local/bin/ From 1d8acf6270cadfbcdafb1081360155260d031ac1 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Fri, 9 Jun 2023 21:00:26 -0600 Subject: [PATCH 019/109] fix(cosmos): prevent Golang error wrapping stack frame divergence --- golang/cosmos/daemon/cmd/genaccounts.go | 20 ++++++++++---------- golang/cosmos/x/lien/lien.go | 14 +++++++------- golang/cosmos/x/lien/module.go | 2 +- golang/cosmos/x/swingset/abci.go | 2 +- golang/cosmos/x/vbank/vbank.go | 18 +++++++++--------- 5 files changed, 28 insertions(+), 28 deletions(-) diff --git a/golang/cosmos/daemon/cmd/genaccounts.go b/golang/cosmos/daemon/cmd/genaccounts.go index 3a4afdc9224..abc85cbc7d2 100644 --- a/golang/cosmos/daemon/cmd/genaccounts.go +++ b/golang/cosmos/daemon/cmd/genaccounts.go @@ -61,7 +61,7 @@ contain valid denominations. 
Accounts may optionally be supplied with vesting pa info, err := kb.Key(args[0]) if err != nil { - return fmt.Errorf("failed to get address from Keybase: %w", err) + return fmt.Errorf("failed to get address from Keybase: %s", err) } addr = info.GetAddress() @@ -69,7 +69,7 @@ contain valid denominations. Accounts may optionally be supplied with vesting pa coins, err := sdk.ParseCoinsNormalized(args[1]) if err != nil { - return fmt.Errorf("failed to parse coins: %w", err) + return fmt.Errorf("failed to parse coins: %s", err) } vestingStart, err := cmd.Flags().GetInt64(flagVestingStart) @@ -87,7 +87,7 @@ contain valid denominations. Accounts may optionally be supplied with vesting pa vestingAmt, err := sdk.ParseCoinsNormalized(vestingAmtStr) if err != nil { - return fmt.Errorf("failed to parse vesting amount: %w", err) + return fmt.Errorf("failed to parse vesting amount: %s", err) } // create concrete account type based on input parameters @@ -119,20 +119,20 @@ contain valid denominations. Accounts may optionally be supplied with vesting pa } if err := genAccount.Validate(); err != nil { - return fmt.Errorf("failed to validate new genesis account: %w", err) + return fmt.Errorf("failed to validate new genesis account: %s", err) } genFile := config.GenesisFile() appState, genDoc, err := genutiltypes.GenesisStateFromGenFile(genFile) if err != nil { - return fmt.Errorf("failed to unmarshal genesis state: %w", err) + return fmt.Errorf("failed to unmarshal genesis state: %s", err) } authGenState := authtypes.GetGenesisStateFromAppState(clientCtx.Codec, appState) accs, err := authtypes.UnpackAccounts(authGenState.Accounts) if err != nil { - return fmt.Errorf("failed to get accounts from any: %w", err) + return fmt.Errorf("failed to get accounts from any: %s", err) } if accs.Contains(addr) { @@ -146,13 +146,13 @@ contain valid denominations. 
Accounts may optionally be supplied with vesting pa genAccs, err := authtypes.PackAccounts(accs) if err != nil { - return fmt.Errorf("failed to convert accounts into any's: %w", err) + return fmt.Errorf("failed to convert accounts into any's: %s", err) } authGenState.Accounts = genAccs authGenStateBz, err := clientCtx.Codec.MarshalJSON(&authGenState) if err != nil { - return fmt.Errorf("failed to marshal auth genesis state: %w", err) + return fmt.Errorf("failed to marshal auth genesis state: %s", err) } appState[authtypes.ModuleName] = authGenStateBz @@ -164,14 +164,14 @@ contain valid denominations. Accounts may optionally be supplied with vesting pa bankGenStateBz, err := clientCtx.Codec.MarshalJSON(bankGenState) if err != nil { - return fmt.Errorf("failed to marshal bank genesis state: %w", err) + return fmt.Errorf("failed to marshal bank genesis state: %s", err) } appState[banktypes.ModuleName] = bankGenStateBz appStateJSON, err := json.Marshal(appState) if err != nil { - return fmt.Errorf("failed to marshal application genesis state: %w", err) + return fmt.Errorf("failed to marshal application genesis state: %s", err) } genDoc.AppState = appStateJSON diff --git a/golang/cosmos/x/lien/lien.go b/golang/cosmos/x/lien/lien.go index a2cd60dea75..3ac507d27fb 100644 --- a/golang/cosmos/x/lien/lien.go +++ b/golang/cosmos/x/lien/lien.go @@ -147,7 +147,7 @@ func (ch portHandler) handleGetStaking(ctx sdk.Context, msg portMessage) (string } bz, err := json.Marshal(&reply) if err != nil { - return "", fmt.Errorf("cannot marshal %v: %w", reply, err) + return "", fmt.Errorf("cannot marshal %v: %s", reply, err) } return string(bz), nil } @@ -157,11 +157,11 @@ func (ch portHandler) handleGetStaking(ctx sdk.Context, msg portMessage) (string func (ch portHandler) handleGetAccountState(ctx sdk.Context, msg portMessage) (string, error) { addr, err := sdk.AccAddressFromBech32(msg.Address) if err != nil { - return "", fmt.Errorf("cannot convert %s to address: %w", msg.Address, err) 
+ return "", fmt.Errorf("cannot convert %s to address: %s", msg.Address, err) } denom := msg.Denom if err = sdk.ValidateDenom(denom); err != nil { - return "", fmt.Errorf("invalid denom %s: %w", denom, err) + return "", fmt.Errorf("invalid denom %s: %s", denom, err) } state := ch.keeper.GetAccountState(ctx, addr) reply := msgAccountState{ @@ -174,7 +174,7 @@ func (ch portHandler) handleGetAccountState(ctx sdk.Context, msg portMessage) (s } bz, err := json.Marshal(&reply) if err != nil { - return "", fmt.Errorf("cannot marshal %v: %w", reply, err) + return "", fmt.Errorf("cannot marshal %v: %s", reply, err) } return string(bz), nil } @@ -184,11 +184,11 @@ func (ch portHandler) handleGetAccountState(ctx sdk.Context, msg portMessage) (s func (ch portHandler) handleChangeLiened(ctx sdk.Context, msg portMessage) (string, error) { addr, err := sdk.AccAddressFromBech32(msg.Address) if err != nil { - return "", fmt.Errorf("cannot convert %s to address: %w", msg.Address, err) + return "", fmt.Errorf("cannot convert %s to address: %s", msg.Address, err) } denom := msg.Denom if err = sdk.ValidateDenom(denom); err != nil { - return "", fmt.Errorf("invalid denom %s: %w", denom, err) + return "", fmt.Errorf("invalid denom %s: %s", denom, err) } newAmt, err := ch.keeper.ChangeLien(ctx, addr, denom, msg.Delta) @@ -197,7 +197,7 @@ func (ch portHandler) handleChangeLiened(ctx sdk.Context, msg portMessage) (stri } bz, err := json.Marshal(&newAmt) if err != nil { - return "", fmt.Errorf("cannot marshal %v: %w", newAmt, err) + return "", fmt.Errorf("cannot marshal %v: %s", newAmt, err) } return string(bz), nil } diff --git a/golang/cosmos/x/lien/module.go b/golang/cosmos/x/lien/module.go index d2eceea250c..b8108a896a1 100644 --- a/golang/cosmos/x/lien/module.go +++ b/golang/cosmos/x/lien/module.go @@ -45,7 +45,7 @@ func (AppModuleBasic) DefaultGenesis(cdc codec.JSONCodec) json.RawMessage { func (AppModuleBasic) ValidateGenesis(cdc codec.JSONCodec, config client.TxEncodingConfig, data 
json.RawMessage) error { var genesisState types.GenesisState if err := cdc.UnmarshalJSON(data, &genesisState); err != nil { - return fmt.Errorf("failed to unmarshal %s genesis state: %w", types.ModuleName, err) + return fmt.Errorf("failed to unmarshal %s genesis state: %s", types.ModuleName, err) } return ValidateGenesis(genesisState) } diff --git a/golang/cosmos/x/swingset/abci.go b/golang/cosmos/x/swingset/abci.go index 96f00a3011f..ec9077f30f8 100644 --- a/golang/cosmos/x/swingset/abci.go +++ b/golang/cosmos/x/swingset/abci.go @@ -115,7 +115,7 @@ func AfterCommitBlock(keeper Keeper) error { // fmt.Fprintf(os.Stderr, "AFTER_COMMIT_BLOCK Returned from SwingSet: %s, %v\n", out, err) if err != nil { // Panic here, in the hopes that a replay from scratch will fix the problem. - panic(fmt.Errorf("AFTER_COMMIT_BLOCK failed: %w. Swingset is in an irrecoverable inconsistent state", err)) + panic(fmt.Errorf("AFTER_COMMIT_BLOCK failed: %s. Swingset is in an irrecoverable inconsistent state", err)) } return err } diff --git a/golang/cosmos/x/vbank/vbank.go b/golang/cosmos/x/vbank/vbank.go index 75ea273e11f..cdd3527a2e4 100644 --- a/golang/cosmos/x/vbank/vbank.go +++ b/golang/cosmos/x/vbank/vbank.go @@ -135,10 +135,10 @@ func (ch portHandler) Receive(ctx *vm.ControllerContext, str string) (ret string case "VBANK_GET_BALANCE": addr, err := sdk.AccAddressFromBech32(msg.Address) if err != nil { - return "", fmt.Errorf("cannot convert %s to address: %w", msg.Address, err) + return "", fmt.Errorf("cannot convert %s to address: %s", msg.Address, err) } if err = sdk.ValidateDenom(msg.Denom); err != nil { - return "", fmt.Errorf("invalid denom %s: %w", msg.Denom, err) + return "", fmt.Errorf("invalid denom %s: %s", msg.Denom, err) } coin := keeper.GetBalance(ctx.Context, addr, msg.Denom) packet := coin.Amount.String() @@ -152,10 +152,10 @@ func (ch portHandler) Receive(ctx *vm.ControllerContext, str string) (ret string case "VBANK_GRAB": addr, err := 
sdk.AccAddressFromBech32(msg.Sender) if err != nil { - return "", fmt.Errorf("cannot convert %s to address: %w", msg.Sender, err) + return "", fmt.Errorf("cannot convert %s to address: %s", msg.Sender, err) } if err = sdk.ValidateDenom(msg.Denom); err != nil { - return "", fmt.Errorf("invalid denom %s: %w", msg.Denom, err) + return "", fmt.Errorf("invalid denom %s: %s", msg.Denom, err) } value, ok := sdk.NewIntFromString(msg.Amount) if !ok { @@ -163,7 +163,7 @@ func (ch portHandler) Receive(ctx *vm.ControllerContext, str string) (ret string } coins := sdk.NewCoins(sdk.NewCoin(msg.Denom, value)) if err := keeper.GrabCoins(ctx.Context, addr, coins); err != nil { - return "", fmt.Errorf("cannot grab %s coins: %w", coins.Sort().String(), err) + return "", fmt.Errorf("cannot grab %s coins: %s", coins.Sort().String(), err) } addressToBalances := make(map[string]sdk.Coins, 1) addressToBalances[msg.Sender] = sdk.NewCoins(sdk.NewInt64Coin(msg.Denom, 1)) @@ -180,10 +180,10 @@ func (ch portHandler) Receive(ctx *vm.ControllerContext, str string) (ret string case "VBANK_GIVE": addr, err := sdk.AccAddressFromBech32(msg.Recipient) if err != nil { - return "", fmt.Errorf("cannot convert %s to address: %w", msg.Recipient, err) + return "", fmt.Errorf("cannot convert %s to address: %s", msg.Recipient, err) } if err = sdk.ValidateDenom(msg.Denom); err != nil { - return "", fmt.Errorf("invalid denom %s: %w", msg.Denom, err) + return "", fmt.Errorf("invalid denom %s: %s", msg.Denom, err) } value, ok := sdk.NewIntFromString(msg.Amount) if !ok { @@ -191,7 +191,7 @@ func (ch portHandler) Receive(ctx *vm.ControllerContext, str string) (ret string } coins := sdk.NewCoins(sdk.NewCoin(msg.Denom, value)) if err := keeper.SendCoins(ctx.Context, addr, coins); err != nil { - return "", fmt.Errorf("cannot give %s coins: %w", coins.Sort().String(), err) + return "", fmt.Errorf("cannot give %s coins: %s", coins.Sort().String(), err) } addressToBalances := make(map[string]sdk.Coins, 1) 
addressToBalances[msg.Recipient] = sdk.NewCoins(sdk.NewInt64Coin(msg.Denom, 1)) @@ -212,7 +212,7 @@ func (ch portHandler) Receive(ctx *vm.ControllerContext, str string) (ret string } coins := sdk.NewCoins(sdk.NewCoin(msg.Denom, value)) if err := keeper.StoreRewardCoins(ctx.Context, coins); err != nil { - return "", fmt.Errorf("cannot store reward %s coins: %w", coins.Sort().String(), err) + return "", fmt.Errorf("cannot store reward %s coins: %s", coins.Sort().String(), err) } if err != nil { return "", err From 6121ae4772d8ae374a40747aed8d96b064a387e6 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Sun, 18 Jun 2023 19:39:12 -0600 Subject: [PATCH 020/109] build(deployment): pin docker to explicit `node:18` --- packages/deployment/Dockerfile.sdk | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/deployment/Dockerfile.sdk b/packages/deployment/Dockerfile.sdk index 6f472bf2afa..f01063de889 100644 --- a/packages/deployment/Dockerfile.sdk +++ b/packages/deployment/Dockerfile.sdk @@ -16,7 +16,7 @@ RUN make GIT_COMMIT="$GIT_COMMIT" GIT_REVISION="$GIT_REVISION" MOD_READONLY= com # OTEL fetch # from https://github.com/open-telemetry/opentelemetry-collector-releases/releases -FROM node:lts-bullseye AS otel +FROM node:18-bullseye AS otel ARG OTEL_VERSION=0.48.0 ARG OTEL_HASH_arm64=846852f4c34f6e494abe202402fdf1d17e2ec3c7a7f96985b6011126ae553249 @@ -32,7 +32,7 @@ RUN set -eux; \ ############################### # The js build container -FROM node:lts-bullseye AS build-js +FROM node:18-bullseye AS build-js # When changing/adding entries here, make sure to search the whole project for # `@@AGORIC_DOCKER_SUBMODULES@@` @@ -68,7 +68,7 @@ RUN rm -rf packages/xsnap/moddable ############################### # The install container. -FROM node:lts-bullseye AS install +FROM node:18-bullseye AS install # Install some conveniences. 
RUN apt-get --allow-releaseinfo-change update && apt-get install -y vim jq less && apt-get clean -y From 8ed879c061d40dc0d05d2350edc6ea1c43fb6fe4 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Sat, 10 Jun 2023 11:56:02 -0600 Subject: [PATCH 021/109] test(cosmos): ensure sdkerrors do not leak stack by default --- golang/cosmos/cmd/libdaemon/main_test.go | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/golang/cosmos/cmd/libdaemon/main_test.go b/golang/cosmos/cmd/libdaemon/main_test.go index 4257bcd553b..57d2a51e006 100644 --- a/golang/cosmos/cmd/libdaemon/main_test.go +++ b/golang/cosmos/cmd/libdaemon/main_test.go @@ -10,16 +10,23 @@ import ( func TestErrorStackTraces(t *testing.T) { err := sdkerrors.Wrapf(sdkerrors.ErrInsufficientFee, "my error %d", 123) - expected := "fail: my error 123: insufficient fee" + expected := "my error 123: insufficient fee" - // Expected message only (what we want). + // Check that sdkerrors.Wrapf(...).Error() does not leak stack. + got := err.Error() + if got != expected { + t.Fatalf("err.Error() %q should be %q", got, expected) + } + + // Check that fmt.Errorf("... %s").Error() does not leak stack. + expected = "fail: " + expected stringified := fmt.Errorf("fail: %s", err) - got := stringified.Error() + got = stringified.Error() if got != expected { t.Fatalf("stringified.Error() %q should be %q", got, expected) } - // Expected stack frame (though undesirable). + // Check that fmt.Errorf("... %w").Error() leaks stack. 
wrapped := fmt.Errorf("fail: %w", err) got = wrapped.Error() expectedAndStack := expected + " [" From 6edae755de93de081def1f4ec53907113221e3ba Mon Sep 17 00:00:00 2001 From: Raphael Salas Date: Wed, 24 May 2023 16:46:26 +0000 Subject: [PATCH 022/109] upgrade-test: split upgrade steps and tag --- packages/deployment/upgrade-test/Makefile | 36 ++++++++++++++++------- 1 file changed, 26 insertions(+), 10 deletions(-) diff --git a/packages/deployment/upgrade-test/Makefile b/packages/deployment/upgrade-test/Makefile index 187174bce05..0e439a81750 100644 --- a/packages/deployment/upgrade-test/Makefile +++ b/packages/deployment/upgrade-test/Makefile @@ -1,27 +1,43 @@ REPOSITORY = agoric/upgrade-test -dockerLabel = latest -ifdef TARGET -buildTargetFlag = --target $(TARGET) -dockerLabel = $(TARGET) -endif +BOOTSTRAP_MODE?=main +TARGET?=agoric-upgrade-11 +dockerLabel?=$(TARGET) ifdef TMUX_CC tmuxCC=1 else tmuxCC=0 endif -@echo buildTargetFlag: $(buildTargetFlag) +@echo target: $(TARGET) local_sdk: (cd ../ && make docker-build-sdk) +agoric-upgrade-7-2: + docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --progress=plain --target agoric-upgrade-7-2 -t $(REPOSITORY):agoric-upgrade-7-2 -f Dockerfile upgrade-test-scripts + +agoric-upgrade-8: agoric-upgrade-7-2 + docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --progress=plain --target agoric-upgrade-8 -t $(REPOSITORY):agoric-upgrade-8 -f Dockerfile upgrade-test-scripts + +agoric-upgrade-8-1: agoric-upgrade-8 + docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --progress=plain --target agoric-upgrade-8-1 -t $(REPOSITORY):agoric-upgrade-8-1 -f Dockerfile upgrade-test-scripts + +agoric-upgrade-9: agoric-upgrade-8-1 + docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --progress=plain --target agoric-upgrade-9 -t $(REPOSITORY):agoric-upgrade-9 -f Dockerfile upgrade-test-scripts + +agoric-upgrade-10: agoric-upgrade-9 + docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --progress=plain --target 
agoric-upgrade-10 -t $(REPOSITORY):agoric-upgrade-10 -f Dockerfile upgrade-test-scripts + +agoric-upgrade-11: agoric-upgrade-10 + docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --progress=plain --target agoric-upgrade-11 -t $(REPOSITORY):agoric-upgrade-11 -f Dockerfile upgrade-test-scripts + # build main bootstrap -build: - docker build --build-arg BOOTSTRAP_MODE=main --progress=plain $(buildTargetFlag) -t $(REPOSITORY):$(dockerLabel) -f Dockerfile upgrade-test-scripts +build: $(TARGET) # build test bootstrap -build_test: - docker build --build-arg BOOTSTRAP_MODE=test --progress=plain $(buildTargetFlag) -t $(REPOSITORY):$(dockerLabel) -f Dockerfile upgrade-test-scripts +build_test: BOOTSTRAP_MODE=test +build_test: $(TARGET) run: docker run --rm -it -e "DEST=1" -e "TMUX_USE_CC=$(tmuxCC)" -p 26656:26656 -p 26657:26657 -p 1317:1317 --entrypoint "/usr/src/agoric-sdk/upgrade-test-scripts/start_to_to.sh" -v "$${PWD}:/workspace" $(REPOSITORY):$(dockerLabel) +.PHONY: local_sdk agoric-upgrade-7-2 agoric-upgrade-8 agoric-upgrade-8-1 agoric-upgrade-9 agoric-upgrade-10 agoric-upgrade-11 build build_test run From 70f6b9007f63793da762cc72e6c3b003fa1a3fe5 Mon Sep 17 00:00:00 2001 From: Raphael Salas Date: Mon, 15 May 2023 22:09:06 +0000 Subject: [PATCH 023/109] fix: use dev instead of latest --- packages/deployment/upgrade-test/Dockerfile | 4 ++-- packages/deployment/upgrade-test/Makefile | 14 ++++++++------ 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/packages/deployment/upgrade-test/Dockerfile b/packages/deployment/upgrade-test/Dockerfile index a75c4f69cc1..ffe27896c4c 100644 --- a/packages/deployment/upgrade-test/Dockerfile +++ b/packages/deployment/upgrade-test/Dockerfile @@ -1,4 +1,4 @@ -ARG DEST_IMAGE=ghcr.io/agoric/agoric-sdk:latest +ARG DEST_IMAGE=ghcr.io/agoric/agoric-sdk:dev ARG BOOTSTRAP_MODE=main # on agoric-uprade-7-2, with upgrade to agoric-upgrade-8 FROM ghcr.io/agoric/ag0:agoric-upgrade-7-2 as agoric-upgrade-7-2 @@ -57,7 +57,7 @@ RUN . 
./upgrade-test-scripts/start_to_to.sh ARG DEST_IMAGE #this is agoric-upgrade-10 / vaults -FROM ${DEST_IMAGE} as agoric-upgrade-10 +FROM ghcr.io/agoric/agoric-sdk:34 as agoric-upgrade-10 ARG BOOTSTRAP_MODE ENV THIS_NAME=agoric-upgrade-10 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} diff --git a/packages/deployment/upgrade-test/Makefile b/packages/deployment/upgrade-test/Makefile index 0e439a81750..0f70f0fa9ff 100644 --- a/packages/deployment/upgrade-test/Makefile +++ b/packages/deployment/upgrade-test/Makefile @@ -1,4 +1,6 @@ REPOSITORY = agoric/upgrade-test +# use :dev (latest prerelease image) unless we build local sdk +DEST_IMAGE = $(if $(findstring local_sdk,$(MAKECMDGOALS)),ghcr.io/agoric/agoric-sdk:latest,ghcr.io/agoric/agoric-sdk:dev) BOOTSTRAP_MODE?=main TARGET?=agoric-upgrade-11 dockerLabel?=$(TARGET) @@ -13,22 +15,22 @@ local_sdk: (cd ../ && make docker-build-sdk) agoric-upgrade-7-2: - docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --progress=plain --target agoric-upgrade-7-2 -t $(REPOSITORY):agoric-upgrade-7-2 -f Dockerfile upgrade-test-scripts + docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --build-arg DEST_IMAGE=$(DEST_IMAGE) --progress=plain --target agoric-upgrade-7-2 -t $(REPOSITORY):agoric-upgrade-7-2 -f Dockerfile upgrade-test-scripts agoric-upgrade-8: agoric-upgrade-7-2 - docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --progress=plain --target agoric-upgrade-8 -t $(REPOSITORY):agoric-upgrade-8 -f Dockerfile upgrade-test-scripts + docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --build-arg DEST_IMAGE=$(DEST_IMAGE) --progress=plain --target agoric-upgrade-8 -t $(REPOSITORY):agoric-upgrade-8 -f Dockerfile upgrade-test-scripts agoric-upgrade-8-1: agoric-upgrade-8 - docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --progress=plain --target agoric-upgrade-8-1 -t $(REPOSITORY):agoric-upgrade-8-1 -f Dockerfile upgrade-test-scripts + docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --build-arg 
DEST_IMAGE=$(DEST_IMAGE) --progress=plain --target agoric-upgrade-8-1 -t $(REPOSITORY):agoric-upgrade-8-1 -f Dockerfile upgrade-test-scripts agoric-upgrade-9: agoric-upgrade-8-1 - docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --progress=plain --target agoric-upgrade-9 -t $(REPOSITORY):agoric-upgrade-9 -f Dockerfile upgrade-test-scripts + docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --build-arg DEST_IMAGE=$(DEST_IMAGE) --progress=plain --target agoric-upgrade-9 -t $(REPOSITORY):agoric-upgrade-9 -f Dockerfile upgrade-test-scripts agoric-upgrade-10: agoric-upgrade-9 - docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --progress=plain --target agoric-upgrade-10 -t $(REPOSITORY):agoric-upgrade-10 -f Dockerfile upgrade-test-scripts + docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --build-arg DEST_IMAGE=$(DEST_IMAGE) --progress=plain --target agoric-upgrade-10 -t $(REPOSITORY):agoric-upgrade-10 -f Dockerfile upgrade-test-scripts agoric-upgrade-11: agoric-upgrade-10 - docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --progress=plain --target agoric-upgrade-11 -t $(REPOSITORY):agoric-upgrade-11 -f Dockerfile upgrade-test-scripts + docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --build-arg DEST_IMAGE=$(DEST_IMAGE) --progress=plain --target agoric-upgrade-11 -t $(REPOSITORY):agoric-upgrade-11 -f Dockerfile upgrade-test-scripts # build main bootstrap build: $(TARGET) From 241f0f2a2869f56355561ebdc2841bcfa266033d Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Wed, 24 May 2023 19:44:26 -0600 Subject: [PATCH 024/109] test(xsnap): check trivial snapshot golden hashes --- .../xsnap/test/snapshots/test-xsnap.js.md | 15 ++++++ .../xsnap/test/snapshots/test-xsnap.js.snap | Bin 0 -> 318 bytes packages/xsnap/test/test-xsnap.js | 50 ++++++++++++++++++ 3 files changed, 65 insertions(+) create mode 100644 packages/xsnap/test/snapshots/test-xsnap.js.md create mode 100644 packages/xsnap/test/snapshots/test-xsnap.js.snap diff 
--git a/packages/xsnap/test/snapshots/test-xsnap.js.md b/packages/xsnap/test/snapshots/test-xsnap.js.md new file mode 100644 index 00000000000..eec90719b35 --- /dev/null +++ b/packages/xsnap/test/snapshots/test-xsnap.js.md @@ -0,0 +1,15 @@ +# Snapshot report for `test/test-xsnap.js` + +The actual snapshot is saved in `test-xsnap.js.snap`. + +Generated by [AVA](https://avajs.dev). + +## produce golden snapshot hashes + +> no evaluations + + '91d30e59c1a087d58bb6d8eefcf1262e99e59cfc249222ab25f881ac642437e5' + +> smallish safeInteger multiplication doesn't spill to XS_NUMBER_KIND + + '4b48f6c58c08bb757efd3b8fb21891a386bdc5bfbae6803c8cb7df108e553ace' diff --git a/packages/xsnap/test/snapshots/test-xsnap.js.snap b/packages/xsnap/test/snapshots/test-xsnap.js.snap new file mode 100644 index 0000000000000000000000000000000000000000..c4e93b1b54a770305f26642187162713e2c3522c GIT binary patch literal 318 zcmV-E0m1%3RzV~~R{?8M2`y&^ z9<0TJ%i(NxF*{~{KD(UX^wQQ9>z*W5&{++}v=*2hIjLI3eSaJssis#-p;6iaNTjw= zP2I8PfQCbiN$@*{9TuNr{-0`01Fj_vRQ)KO?%3JdfHO)$FR_N!SklHB { test('write and read snapshot (use FS)', writeAndReadSnapshot, true); test('write and read snapshot (use stream)', writeAndReadSnapshot, false); +test('produce golden snapshot hashes', async t => { + t.log(`\ +The snapshot hashes produced by this test were created from this package's +version of xsnap compiled for and run on Agoric's supported (within-consensus) +platforms. + +The snapshot will change (and the test will fail) if xsnap or this platform +is not compatible with this predefined consensus. This is likely to happen +in the future when xsnap is upgraded, in which case there will need to be +special accommodation for the new version, not just generating new golden +hashes. 
+`); + const toEvals = [ + [`no evaluations`, ''], + [ + `smallish safeInteger multiplication doesn't spill to XS_NUMBER_KIND`, + `globalThis.bazinga = 100; globalThis.bazinga *= 1_000_000;`, + ], + ]; + for await (const [description, toEval] of toEvals) { + t.log(description); + const messages = []; + async function handleCommand(message) { + messages.push(decode(message)); + return new Uint8Array(); + } + + const vat0 = await xsnap({ + ...options(io), + handleCommand, + snapshotUseFs: false, + }); + if (toEval) { + await vat0.evaluate(toEval); + } + + const hash = createHash('sha256'); + for await (const buf of vat0.makeSnapshotStream()) { + hash.update(buf); + } + await vat0.close(); + + const hexHash = hash.digest('hex'); + t.log(`${description} produces golden hash ${hexHash}`); + t.snapshot(hexHash, description); + t.deepEqual(messages, [], `${description} messages`); + } +}); + test('execute immediately after makeSnapshotStream', async t => { const messages = []; async function handleCommand(message) { From 27017160b6861b6696b200afe8c710ed5c58aff2 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Tue, 20 Jun 2023 16:37:58 -0600 Subject: [PATCH 025/109] ci(cosmos): forbid `%w` error-wrapping format specifier --- .github/workflows/golangci-lint.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/golangci-lint.yml b/.github/workflows/golangci-lint.yml index 9cb9c995468..7d1e387affe 100644 --- a/.github/workflows/golangci-lint.yml +++ b/.github/workflows/golangci-lint.yml @@ -36,3 +36,10 @@ jobs: # only-new-issues: true only-new-issues: false working-directory: ./golang/cosmos + - name: forbid %w error-wrapping format specifier + run: | + set -e + if find ./golang/cosmos -name '*.go' ! -name '*_test.go' -print0 | xargs -0 grep '%w'; then + echo "Found %w in ./golang/cosmos; please use %s instead." 
+ exit 1 + fi From 9ca048408f04cea9a67d74f59c8e349ed2766f21 Mon Sep 17 00:00:00 2001 From: Raphael Salas Date: Mon, 15 May 2023 22:28:13 +0000 Subject: [PATCH 026/109] github action uses locally built latest --- .github/workflows/test-all-packages.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-all-packages.yml b/.github/workflows/test-all-packages.yml index 2027e01e5eb..0c752ada3db 100644 --- a/.github/workflows/test-all-packages.yml +++ b/.github/workflows/test-all-packages.yml @@ -573,7 +573,7 @@ jobs: - name: docker build (sdk) run: cd packages/deployment && ./scripts/test-docker-build.sh | $TEST_COLLECT - name: docker build upgrade test - run: cd packages/deployment/upgrade-test && docker build --build-arg BOOTSTRAP_MODE=${{ matrix.bootstrap-version }} -t docker-upgrade-test:latest -f Dockerfile upgrade-test-scripts + run: cd packages/deployment/upgrade-test && docker build --build-arg BOOTSTRAP_MODE=${{ matrix.bootstrap-version }} --build-arg DEST_IMAGE=ghcr.io/agoric/agoric-sdk:latest -t docker-upgrade-test:latest -f Dockerfile upgrade-test-scripts - name: docker run upgrade final stage run: docker run --env "DEST=0" docker-upgrade-test:latest - uses: ./.github/actions/post-test From dc8a7f42ebecf7e3db6fde281058031ca58d0097 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Wed, 24 May 2023 19:50:31 -0600 Subject: [PATCH 027/109] build(xsnap): use `-D__has_builtin(x)=1` to avoid divergence --- packages/xsnap/src/build.js | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/xsnap/src/build.js b/packages/xsnap/src/build.js index fbb4bbb671b..2fcba467a21 100644 --- a/packages/xsnap/src/build.js +++ b/packages/xsnap/src/build.js @@ -235,6 +235,7 @@ async function main(args, { env, stdout, spawn, fs, os }) { `MODDABLE=${ModdableSDK.MODDABLE}`, `GOAL=${goal}`, `XSNAP_VERSION=${pkg.version}`, + `CC=cc "-D__has_builtin(x)=1"`, '-f', 'xsnap-worker.mk', ], From 69c730a549ee30b49500978efd29c00669b4a7a9 Mon Sep 17 
00:00:00 2001 From: Raphael Salas Date: Mon, 15 May 2023 22:37:45 +0000 Subject: [PATCH 028/109] update docs --- packages/deployment/upgrade-test/Readme.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/packages/deployment/upgrade-test/Readme.md b/packages/deployment/upgrade-test/Readme.md index 3c6176a5f9e..44e21de4841 100644 --- a/packages/deployment/upgrade-test/Readme.md +++ b/packages/deployment/upgrade-test/Readme.md @@ -20,6 +20,19 @@ This will build all previous upgrades and upgrade each one. make build ``` +By default pre-releases use the latest image tagged `dev` in our [container repository](https://github.com/agoric/agoric-sdk/pkgs/container/agoric-sdk). To use +a specific build: + +```shell +DEST_IMAGE=docker pull ghcr.io/agoric/agoric-sdk:20230515033839-e56ae7 +``` +To use a build based on local changes: +```shell +# build ghcr.io/agoric/agoric-sdk:latest +make local_sdk build +# or DEST_IMAGE=ghcr.io/agoric/agoric-sdk:latest make build +``` + **To run the latest upgrade interactively** ```shell From 745a5cc2a049069cdea4ebb07690f66d16976061 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Sun, 2 Jul 2023 19:28:49 -0600 Subject: [PATCH 029/109] ci(deployment-test): remove hardwired /usr/src/agoric-sdk --- .github/workflows/deployment-test.yml | 15 ++------------- .../deployment/scripts/integration-test.sh | 16 ++++++++++------ scripts/run-deployment-integration.sh | 18 +++++++----------- 3 files changed, 19 insertions(+), 30 deletions(-) diff --git a/.github/workflows/deployment-test.yml b/.github/workflows/deployment-test.yml index 07f413162f4..794b82dfe74 100644 --- a/.github/workflows/deployment-test.yml +++ b/.github/workflows/deployment-test.yml @@ -64,23 +64,13 @@ jobs: path: testnet-load-generator ref: ${{steps.get-loadgen-branch.outputs.result}} - - name: Put repos under /usr/src where scripts expect them - run: | - set -e - sudo mv "$GITHUB_WORKSPACE/testnet-load-generator" /usr/src/testnet-load-generator - sudo cp -a 
"$GITHUB_WORKSPACE" /usr/src/agoric-sdk - ln -s /usr/src/agoric-sdk/packages/deployment/bin/ag-setup-cosmos /usr/local/bin/ag-setup-cosmos - working-directory: / - - name: Build cosmic-swingset dependencies run: | set -e cd packages/cosmic-swingset make install - working-directory: /usr/src/agoric-sdk - - run: /usr/src/agoric-sdk/packages/deployment/scripts/integration-test.sh + - run: packages/deployment/scripts/integration-test.sh timeout-minutes: 90 - working-directory: /usr/src/agoric-sdk env: NETWORK_NAME: chaintest - name: capture results @@ -97,14 +87,13 @@ jobs: # Tear down the nodes. echo yes | packages/deployment/scripts/setup.sh destroy || true - working-directory: /usr/src/agoric-sdk env: NETWORK_NAME: chaintest - uses: actions/upload-artifact@v3 if: always() with: name: deployment-test-results-${{ env.NOW }} - path: /usr/src/agoric-sdk/chaintest/results + path: chaintest/results - name: notify on failure if: failure() && github.event_name != 'pull_request' diff --git a/packages/deployment/scripts/integration-test.sh b/packages/deployment/scripts/integration-test.sh index dbac085d216..913d300610f 100755 --- a/packages/deployment/scripts/integration-test.sh +++ b/packages/deployment/scripts/integration-test.sh @@ -21,7 +21,7 @@ cd "$NETWORK_NAME/setup" export AG_SETUP_COSMOS_HOME=${AG_SETUP_COSMOS_HOME-$PWD} export AG_SETUP_COSMOS_STATE_SYNC_INTERVAL=20 -AGORIC_SDK_PATH=${AGORIC_SDK_PATH-$(cd "$thisdir/../../.." > /dev/null && pwd -P)} +SDK_SRC=${SDK_SRC-$(cd "$thisdir/../../.." > /dev/null && pwd -P)} if [ -d /usr/src/testnet-load-generator ] then @@ -44,18 +44,22 @@ VAULT_FACTORY_CONTROLLER_ADDR="$SOLO_ADDR" \ CHAIN_BOOTSTRAP_VAT_CONFIG="$VAT_CONFIG" \ "$thisdir/setup.sh" bootstrap ${1+"$@"} -if [ -d /usr/src/testnet-load-generator ] +loadgen="$PWD/testnet-load-generator" +if [ ! 
-d "$loadgen" ]; then + loadgen=/usr/src/testnet-load-generator +fi +if [ -d "$loadgen" ] then - /usr/src/agoric-sdk/packages/deployment/scripts/setup.sh show-config > "$RESULTSDIR/network-config" + "$SDK_SRC/packages/deployment/scripts/setup.sh" show-config > "$RESULTSDIR/network-config" cp ag-chain-cosmos/data/genesis.json "$RESULTSDIR/genesis.json" cp "$AG_SETUP_COSMOS_HOME/ag-chain-cosmos/data/genesis.json" "$RESULTSDIR/genesis.json" - cd /usr/src/testnet-load-generator + cd "$loadgen" SOLO_COINS=40000000000uist \ "$AG_SETUP_COSMOS_HOME/faucet-helper.sh" add-egress loadgen "$SOLO_ADDR" - SLOGSENDER=@agoric/telemetry/src/otel-trace.js SOLO_SLOGSENDER= \ + SLOGSENDER=@agoric/telemetry/src/otel-trace.js SOLO_SLOGSENDER="" \ SLOGSENDER_FAIL_ON_ERROR=1 SLOGSENDER_AGENT=process \ AG_CHAIN_COSMOS_HOME=$HOME/.agoric \ - SDK_BUILD=0 MUST_USE_PUBLISH_BUNDLE=1 SDK_SRC=/usr/src/agoric-sdk OUTPUT_DIR="$RESULTSDIR" ./start.sh \ + SDK_BUILD=0 MUST_USE_PUBLISH_BUNDLE=1 SDK_SRC=$SDK_SRC OUTPUT_DIR="$RESULTSDIR" ./start.sh \ --no-stage.save-storage \ --stages=3 --stage.duration=10 --stage.loadgen.cycles=4 \ --stage.loadgen.faucet.interval=6 --stage.loadgen.faucet.limit=4 \ diff --git a/scripts/run-deployment-integration.sh b/scripts/run-deployment-integration.sh index d5511a02039..04b5b9c5e95 100644 --- a/scripts/run-deployment-integration.sh +++ b/scripts/run-deployment-integration.sh @@ -1,24 +1,20 @@ #!/bin/sh set -xueo pipefail -SDK_REAL_DIR="$(cd "$(dirname "$(readlink -f -- "$0")")/.." > /dev/null && pwd -P)" +SDK_SRC="$(cd "$(dirname "$(readlink -f -- "$0")")/.." 
> /dev/null && pwd -P)" +export SDK_SRC -# For some reason something in the integration script -# relies on the SDK being at that location # Set AGORIC_SDK_PATH to the SDK path on the host if this # script is running inside a docker environment (and make sure to # bind mount /var/run/docker.sock) -if [ "$SDK_REAL_DIR" != "/usr/src/agoric-sdk" ]; then - echo 'Agoric SDK must be mounted in "/usr/src/agoric-sdk"' - exit 1 -fi +export AGORIC_SDK_PATH="${AGORIC_SDK_PATH-$SDK_SRC}" export NETWORK_NAME=chaintest -sudo ln -sf /usr/src/agoric-sdk/packages/deployment/bin/ag-setup-cosmos /usr/local/bin/ag-setup-cosmos -rm -rf /usr/src/agoric-sdk/chaintest ~/.ag-chain-cosmos/ /usr/src/testnet-load-generator/_agstate/agoric-servers/testnet-8000 +sudo ln -sf "$SDK_SRC/packages/deployment/bin/ag-setup-cosmos" /usr/local/bin/ag-setup-cosmos +rm -rf "$SDK_SRC/chaintest" ~/.ag-chain-cosmos/ /usr/src/testnet-load-generator/_agstate/agoric-servers/testnet-8000 -cd /usr/src/agoric-sdk/ +cd "$SDK_SRC" sudo ./packages/deployment/scripts/install-deps.sh yarn install && XSNAP_RANDOM_INIT=1 yarn build && make -C packages/cosmic-swingset/ # change to "false" to skip extraction on success like in CI @@ -33,4 +29,4 @@ packages/deployment/scripts/capture-integration-results.sh $testfailure echo yes | packages/deployment/scripts/setup.sh destroy || true # Not part of CI -/usr/src/agoric-sdk/scripts/process-integration-results.sh $NETWORK_NAME/results +scripts/process-integration-results.sh $NETWORK_NAME/results From 92620ea21efc328ae84c76f0cd4d443545633be0 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Mon, 10 Jul 2023 19:33:00 +0000 Subject: [PATCH 030/109] refactor(cosmic-swingset): portNums are static in init --- golang/cosmos/app/app.go | 19 ++- golang/cosmos/vm/controller.go | 3 +- golang/cosmos/x/swingset/abci.go | 3 - golang/cosmos/x/swingset/genesis.go | 15 +- golang/cosmos/x/swingset/keeper/msg_server.go | 2 - packages/cosmic-swingset/src/chain-main.js | 133 ++++++++++-------- 6 
files changed, 88 insertions(+), 87 deletions(-) diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index 9d037a3bf3f..823074aa5e7 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -440,8 +440,7 @@ func NewAgoricApp( app.VstorageKeeper = vstorage.NewKeeper( keys[vstorage.StoreKey], ) - vm.RegisterPortHandler("vstorage", vstorage.NewStorageHandler(app.VstorageKeeper)) - app.vstoragePort = vm.GetPort("vstorage") + app.vstoragePort = vm.RegisterPortHandler("vstorage", vstorage.NewStorageHandler(app.VstorageKeeper)) // The SwingSetKeeper is the Keeper from the SwingSet module app.SwingSetKeeper = swingset.NewKeeper( @@ -810,12 +809,12 @@ type cosmosInitAction struct { Type string `json:"type"` ChainID string `json:"chainID"` Params swingset.Params `json:"params"` - StoragePort int `json:"storagePort"` SupplyCoins sdk.Coins `json:"supplyCoins"` - VibcPort int `json:"vibcPort"` - VbankPort int `json:"vbankPort"` - LienPort int `json:"lienPort"` UpgradePlan *upgradetypes.Plan `json:"upgradePlan,omitempty"` + LienPort int `json:"lienPort"` + StoragePort int `json:"storagePort"` + VbankPort int `json:"vbankPort"` + VibcPort int `json:"vibcPort"` } // Name returns the name of the App @@ -831,12 +830,12 @@ func (app *GaiaApp) MustInitController(ctx sdk.Context) { Type: "AG_COSMOS_INIT", ChainID: ctx.ChainID(), Params: app.SwingSetKeeper.GetParams(ctx), - StoragePort: app.vstoragePort, SupplyCoins: sdk.NewCoins(app.BankKeeper.GetSupply(ctx, "uist")), - VibcPort: app.vibcPort, - VbankPort: app.vbankPort, - LienPort: app.lienPort, UpgradePlan: app.upgradePlan, + LienPort: app.lienPort, + StoragePort: app.vstoragePort, + VbankPort: app.vbankPort, + VibcPort: app.vibcPort, } out, err := app.SwingSetKeeper.BlockingSend(ctx, action) diff --git a/golang/cosmos/vm/controller.go b/golang/cosmos/vm/controller.go index bf4f317f402..acc70f3e573 100644 --- a/golang/cosmos/vm/controller.go +++ b/golang/cosmos/vm/controller.go @@ -8,7 +8,6 @@ import ( type 
ControllerContext struct { Context sdk.Context - StoragePort int IBCChannelHandlerPort int } @@ -74,7 +73,7 @@ func UnregisterPortHandler(portNum int) error { func ReceiveFromController(portNum int, msg string) (string, error) { handler := portToHandler[portNum] if handler == nil { - return "", fmt.Errorf("Unregistered port %d", portNum) + return "", fmt.Errorf("unregistered port %d", portNum) } return handler.Receive(&controllerContext, msg) } diff --git a/golang/cosmos/x/swingset/abci.go b/golang/cosmos/x/swingset/abci.go index ec9077f30f8..567975c995f 100644 --- a/golang/cosmos/x/swingset/abci.go +++ b/golang/cosmos/x/swingset/abci.go @@ -9,13 +9,11 @@ import ( sdk "github.com/cosmos/cosmos-sdk/types" abci "github.com/tendermint/tendermint/abci/types" - "github.com/Agoric/agoric-sdk/golang/cosmos/vm" "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types" ) type beginBlockAction struct { Type string `json:"type"` - StoragePort int `json:"storagePort"` BlockHeight int64 `json:"blockHeight"` BlockTime int64 `json:"blockTime"` ChainID string `json:"chainID"` @@ -39,7 +37,6 @@ func BeginBlock(ctx sdk.Context, req abci.RequestBeginBlock, keeper Keeper) erro action := &beginBlockAction{ Type: "BEGIN_BLOCK", - StoragePort: vm.GetPort("vstorage"), BlockHeight: ctx.BlockHeight(), BlockTime: ctx.BlockTime().Unix(), ChainID: ctx.ChainID(), diff --git a/golang/cosmos/x/swingset/genesis.go b/golang/cosmos/x/swingset/genesis.go index 8205d4836ba..ccdfd9468da 100644 --- a/golang/cosmos/x/swingset/genesis.go +++ b/golang/cosmos/x/swingset/genesis.go @@ -5,7 +5,6 @@ import ( "fmt" stdlog "log" - "github.com/Agoric/agoric-sdk/golang/cosmos/vm" "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types" sdk "github.com/cosmos/cosmos-sdk/types" abci "github.com/tendermint/tendermint/abci/types" @@ -32,18 +31,16 @@ func DefaultGenesisState() *types.GenesisState { } type bootstrapBlockAction struct { - Type string `json:"type"` - BlockTime int64 `json:"blockTime"` - 
StoragePort int `json:"storagePort"` + Type string `json:"type"` + BlockTime int64 `json:"blockTime"` } func BootSwingset(ctx sdk.Context, keeper Keeper) error { - // Just run the SwingSet kernel to finish bootstrap and get ready to open for + // Just run the SwingSet kernel to finish bootstrap and get ready to open for // business. action := &bootstrapBlockAction{ - Type: "BOOTSTRAP_BLOCK", - BlockTime: ctx.BlockTime().Unix(), - StoragePort: vm.GetPort("vstorage"), + Type: "BOOTSTRAP_BLOCK", + BlockTime: ctx.BlockTime().Unix(), } _, err := keeper.BlockingSend(ctx, action) @@ -56,7 +53,7 @@ func InitGenesis(ctx sdk.Context, keeper Keeper, data *types.GenesisState) []abc stdlog.Println("Running SwingSet until bootstrap is ready") err := BootSwingset(ctx, keeper) - + // fmt.Fprintf(os.Stderr, "BOOTSTRAP_BLOCK Returned from swingset: %s, %v\n", out, err) if err != nil { // NOTE: A failed BOOTSTRAP_BLOCK means that the SwingSet state is inconsistent. diff --git a/golang/cosmos/x/swingset/keeper/msg_server.go b/golang/cosmos/x/swingset/keeper/msg_server.go index 65f3d108f05..0d959ce23cc 100644 --- a/golang/cosmos/x/swingset/keeper/msg_server.go +++ b/golang/cosmos/x/swingset/keeper/msg_server.go @@ -25,7 +25,6 @@ type deliverInboundAction struct { Peer string `json:"peer"` Messages [][]interface{} `json:"messages"` Ack uint64 `json:"ack"` - StoragePort int `json:"storagePort"` BlockHeight int64 `json:"blockHeight"` BlockTime int64 `json:"blockTime"` } @@ -58,7 +57,6 @@ func (keeper msgServer) DeliverInbound(goCtx context.Context, msg *types.MsgDeli Peer: msg.Submitter.String(), Messages: messages, Ack: msg.Ack, - StoragePort: vm.GetPort("vstorage"), BlockHeight: ctx.BlockHeight(), BlockTime: ctx.BlockTime().Unix(), } diff --git a/packages/cosmic-swingset/src/chain-main.js b/packages/cosmic-swingset/src/chain-main.js index 89f383e4214..46ef7291ef3 100644 --- a/packages/cosmic-swingset/src/chain-main.js +++ b/packages/cosmic-swingset/src/chain-main.js @@ -240,11 +240,11 
@@ export default async function main(progname, args, { env, homedir, agcc }) { /** @type {((obj: object) => void) | undefined} */ let writeSlogObject; - // this storagePort changes for every single message. We define it out here - // so the 'externalStorage' object can close over the single mutable - // instance, and we update the 'portNums.storage' value each time toSwingSet is called + // the storagePort used to change for every single message. It's defined out + // here so 'sendToChainStorage' can close over the single mutable instance, + // when we updated the 'portNums.storage' value each time toSwingSet was called. async function launchAndInitializeSwingSet(bootMsg) { - const sendToChain = msg => chainSend(portNums.storage, msg); + const sendToChainStorage = msg => chainSend(portNums.storage, msg); // this object is used to store the mailbox state. const fromBridgeMailbox = data => { const ack = toNumber(data.ack); @@ -253,7 +253,7 @@ export default async function main(progname, args, { env, homedir, agcc }) { }; const mailboxStorage = makeReadCachingStorage( makePrefixedBridgeStorage( - sendToChain, + sendToChainStorage, `${STORAGE_PATH.MAILBOX}.`, 'legacySet', val => fromBridgeMailbox(JSON.parse(val)), @@ -263,7 +263,7 @@ export default async function main(progname, args, { env, homedir, agcc }) { const makeQueueStorage = queuePath => { const { kvStore, commit, abort } = makeBufferedStorage( makePrefixedBridgeStorage( - sendToChain, + sendToChainStorage, `${queuePath}.`, 'setWithoutNotify', x => x, @@ -305,7 +305,7 @@ export default async function main(progname, args, { env, homedir, agcc }) { } return [path, value]; }); - sendToChain( + sendToChainStorage( stringify({ method: 'setWithoutNotify', args: entries, @@ -612,69 +612,80 @@ export default async function main(progname, args, { env, homedir, agcc }) { async function toSwingSet(action, _replier) { // console.log(`toSwingSet`, action); - if (action.vibcPort) { - portNums.dibc = action.vibcPort; - } - 
if (action.storagePort) { - // Initialize the storage for this particular transaction. - // console.log(` setting portNums.storage to`, action.storagePort); - portNums.storage = action.storagePort; - } + await null; - if (action.vbankPort) { - portNums.bank = action.vbankPort; - } + switch (action.type) { + case AG_COSMOS_INIT: { + // console.error('got AG_COSMOS_INIT', action); - if (action.lienPort) { - portNums.lien = action.lienPort; - } + !blockingSend || Fail`Swingset already initialized`; - // Snapshot actions are specific to cosmos chains and handled here - if (action.type === ActionType.COSMOS_SNAPSHOT) { - const { blockHeight, request, args: requestArgs } = action; - writeSlogObject?.({ - type: 'cosmic-swingset-snapshot-start', - blockHeight, - request, - args: requestArgs, - }); + if (action.vibcPort) { + portNums.dibc = action.vibcPort; + } - const resultP = handleCosmosSnapshot(blockHeight, request, requestArgs); + if (action.storagePort) { + portNums.storage = action.storagePort; + } - resultP.then( - result => { - writeSlogObject?.({ - type: 'cosmic-swingset-snapshot-finish', - blockHeight, - request, - args: requestArgs, - result, - }); - }, - error => { - writeSlogObject?.({ - type: 'cosmic-swingset-snapshot-finish', - blockHeight, - request, - args: requestArgs, - error, - }); - }, - ); + if (action.vbankPort) { + portNums.bank = action.vbankPort; + } - return resultP; - } + if (action.lienPort) { + portNums.lien = action.lienPort; + } + harden(portNums); - // Ensure that initialization has completed. - blockingSend = await (blockingSend || launchAndInitializeSwingSet(action)); + // Ensure that initialization has completed. 
+ blockingSend = await launchAndInitializeSwingSet(action); - if (action.type === AG_COSMOS_INIT) { - // console.error('got AG_COSMOS_INIT', action); - return true; - } + return true; + } + + // Snapshot actions are specific to cosmos chains and handled here + case ActionType.COSMOS_SNAPSHOT: { + const { blockHeight, request, args: requestArgs } = action; + writeSlogObject?.({ + type: 'cosmic-swingset-snapshot-start', + blockHeight, + request, + args: requestArgs, + }); + + const resultP = handleCosmosSnapshot(blockHeight, request, requestArgs); + + resultP.then( + result => { + writeSlogObject?.({ + type: 'cosmic-swingset-snapshot-finish', + blockHeight, + request, + args: requestArgs, + result, + }); + }, + error => { + writeSlogObject?.({ + type: 'cosmic-swingset-snapshot-finish', + blockHeight, + request, + args: requestArgs, + error, + }); + }, + ); + + return resultP; + } - // Block related actions are processed by `blockingSend` - return blockingSend(action); + default: { + if (!blockingSend) throw Fail`Swingset not initialized`; + + // Block related actions are processed by `blockingSend` + return blockingSend(action); + } + } } } From fa942e2f17026635e70e9f618bc1f9005498c862 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Mon, 3 Jul 2023 01:34:08 -0600 Subject: [PATCH 031/109] fix(solo): let `init` tolerate existing `html` directory --- packages/solo/src/init-basedir.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/solo/src/init-basedir.js b/packages/solo/src/init-basedir.js index e704e2e7346..384d81c3b84 100644 --- a/packages/solo/src/init-basedir.js +++ b/packages/solo/src/init-basedir.js @@ -55,7 +55,7 @@ export default function initBasedir( `${JSON.stringify(connections)}\n`, ); const dstHtmldir = path.join(basedir, 'html'); - fs.mkdirSync(dstHtmldir); + fs.mkdirSync(dstHtmldir, { recursive: true }); // Save the configuration options. 
fs.writeFileSync(path.join(basedir, 'options.json'), JSON.stringify(options)); From 9a51df515b87638b869564ab08445a0ce0d55707 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Tue, 11 Jul 2023 22:24:27 +0000 Subject: [PATCH 032/109] fix(x/vstorage): value can be empty in genesis data --- golang/cosmos/x/vstorage/genesis.go | 3 --- 1 file changed, 3 deletions(-) diff --git a/golang/cosmos/x/vstorage/genesis.go b/golang/cosmos/x/vstorage/genesis.go index 2d596166829..970a70a8e7e 100644 --- a/golang/cosmos/x/vstorage/genesis.go +++ b/golang/cosmos/x/vstorage/genesis.go @@ -22,9 +22,6 @@ func ValidateGenesis(data *types.GenesisState) error { if err := types.ValidatePath(entry.Path); err != nil { return fmt.Errorf("genesis vstorage.data entry %q has invalid path format: %s", entry.Path, err) } - if entry.Value == "" { - return fmt.Errorf("genesis vstorage.data entry %q has no data", entry.Path) - } } return nil } From 3a8e4c0d576d9b6ff959d1897f99db55e85963ea Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Mon, 3 Jul 2023 01:37:10 -0600 Subject: [PATCH 033/109] feat(deployment): only include this package in `setup` docker image --- .dockerignore | 5 ++-- packages/deployment/Dockerfile | 13 ++-------- packages/deployment/Dockerfile.sdk | 10 +++---- packages/deployment/Makefile | 2 +- .../ansible/roles/copy/tasks/main.yml | 9 +++---- packages/deployment/docker/ag-setup-cosmos | 6 ++++- .../deployment/scripts/integration-test.sh | 26 +++++++++++-------- packages/deployment/scripts/setup.sh | 3 +-- packages/deployment/src/init.js | 2 +- .../scripts => scripts}/smoketest-binaries.sh | 0 10 files changed, 37 insertions(+), 39 deletions(-) rename {packages/deployment/scripts => scripts}/smoketest-binaries.sh (100%) diff --git a/.dockerignore b/.dockerignore index 1b4e5f1f0f9..3647c99be6d 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,14 +3,15 @@ docker packages/cosmic-swingset/t[0-9] packages/cosmic-swingset/t[0-9].* -packages/cosmic-swingset/lib/lib*.h 
-packages/cosmic-swingset/lib/lib*.so +golang/cosmos/build +packages/deployment packages/swingset-runner packages/stat-logger **/deployment.json **/vars.tf **/*.log **/build +**/bundles **/__pycache__ **/*.egg-info **/swingset-kernel-state diff --git a/packages/deployment/Dockerfile b/packages/deployment/Dockerfile index f490a11c58c..88bbde7a0c2 100644 --- a/packages/deployment/Dockerfile +++ b/packages/deployment/Dockerfile @@ -2,22 +2,13 @@ ARG TAG=latest ARG REPO=agoric/agoric-sdk ARG REGISTRY=ghcr.io -# FIXME: Journalbeat compilation is currently broken, but non-essential. -# Removed from the build. -# FROM golang:1.20-bullseye AS go-build - -# WORKDIR /usr/src/journalbeat -# RUN apt-get update -y && apt-get install -y libsystemd-dev -# RUN go get github.com/mheese/journalbeat - FROM $REGISTRY/$REPO:$TAG +COPY ./scripts/install-deps.sh /usr/src/agoric-sdk/packages/deployment/scripts/ RUN /usr/src/agoric-sdk/packages/deployment/scripts/install-deps.sh -# # Copy journalbeat for logging support -# COPY --from=go-build /go/bin/journalbeat /usr/local/bin/ - WORKDIR /usr/src/agoric-sdk/packages/deployment +COPY . . RUN ln -sf $PWD/bin/ag-setup-cosmos /usr/local/bin/ag-setup-cosmos WORKDIR /data/chains diff --git a/packages/deployment/Dockerfile.sdk b/packages/deployment/Dockerfile.sdk index f01063de889..71b0ad65307 100644 --- a/packages/deployment/Dockerfile.sdk +++ b/packages/deployment/Dockerfile.sdk @@ -62,7 +62,7 @@ RUN \ yarn build # Remove dev dependencies. -RUN rm -rf packages/xsnap/moddable +RUN rm -rf packages/xsnap/moddable packages/xsnap/xsnap-native/build/tmp # FIXME: This causes bundling differences. https://github.com/endojs/endo/issues/919 # RUN yarn install --frozen-lockfile --production --network-timeout 100000 @@ -86,11 +86,11 @@ ARG GIT_REVISION=unknown RUN echo "$GIT_REVISION" > /usr/src/agoric-sdk/packages/solo/public/git-revision.txt # Compatibility links for older containers. 
-RUN ln -s /data /agoric -RUN ln -s /data/solo /usr/src/agoric-sdk/packages/cosmic-swingset/solo -RUN ln -s /data/chains /usr/src/agoric-sdk/packages/cosmic-swingset/chains +RUN ln -sf /data /agoric +RUN ln -sf /data/solo /usr/src/agoric-sdk/packages/cosmic-swingset/solo +RUN ln -sf /data/chains /usr/src/agoric-sdk/packages/cosmic-swingset/chains -RUN /usr/src/agoric-sdk/packages/deployment/scripts/smoketest-binaries.sh +RUN /usr/src/agoric-sdk/scripts/smoketest-binaries.sh # By default, run the daemon with specified arguments. WORKDIR /root diff --git a/packages/deployment/Makefile b/packages/deployment/Makefile index d718079ea6b..0f9316a9929 100644 --- a/packages/deployment/Makefile +++ b/packages/deployment/Makefile @@ -5,7 +5,7 @@ VERSION := $(shell node -e 'console.log(require("../../package.json").version??" TAG := $(if $(VERSION),$(VERSION),latest) GIT_REVISION := $(shell hash=$$(git rev-parse --short HEAD); \ - dirty=`git diff --quiet || echo -dirty`; \ + dirty=`git diff --quiet 2>/dev/null || echo -dirty`; \ echo "$$hash$$dirty") # Don't push alpha tags as ":$(TAG)". 
diff --git a/packages/deployment/ansible/roles/copy/tasks/main.yml b/packages/deployment/ansible/roles/copy/tasks/main.yml index 217bc184635..75ab448daab 100644 --- a/packages/deployment/ansible/roles/copy/tasks/main.yml +++ b/packages/deployment/ansible/roles/copy/tasks/main.yml @@ -7,15 +7,14 @@ use_ssh_args: true when: HELPER_BINARY is defined -- name: Synchronize Agoric SDK +- name: 'Synchronize Agoric SDK {{ AGORIC_SDK }}' synchronize: src: '{{ AGORIC_SDK }}/' dest: '/usr/src/agoric-sdk/' - # dirs: yes delete: yes - rsync_opts: - - "--partial-dir=/usr/src/agoric-sdk/.rsync-partial/{{ hostvars[inventory_hostname]['ansible_host'] | default(inventory_hostname) }}" - - '--exclude=.rsync-partial' + #rsync_opts: + # - "--partial-dir=/usr/src/agoric-sdk/.rsync-partial/{{ hostvars[inventory_hostname]['ansible_host'] | default(inventory_hostname) }}" + # - '--exclude=.rsync-partial' # checksum: yes mode: push use_ssh_args: true diff --git a/packages/deployment/docker/ag-setup-cosmos b/packages/deployment/docker/ag-setup-cosmos index 0f2466ed778..79e380b19a6 100755 --- a/packages/deployment/docker/ag-setup-cosmos +++ b/packages/deployment/docker/ag-setup-cosmos @@ -1,5 +1,4 @@ #! 
/bin/sh -PORT=26657 NETWORK_NAME=${NETWORK_NAME-agoric} SETUP_HOME=${SETUP_HOME-$NETWORK_NAME} IMAGE=ghcr.io/agoric/cosmic-swingset-setup:${TAG-latest} @@ -21,10 +20,15 @@ show-*) TTY=-i ;; esac +setup_volume= +if test -f deployment.json; then + setup_volume=--volume="$PWD:/data/chains/$SETUP_HOME" +fi exec docker run --rm $TTY $FLAGS \ --volume=ag-setup-cosmos-chains:/data/chains \ --volume=ag-chain-cosmos-state:/root/.ag-chain-cosmos \ --volume=/var/run/docker.sock:/var/run/docker.sock \ + $setup_volume \ --env AGD_HOME=/root/.ag-chain-cosmos \ --env AG_SETUP_COSMOS_NAME=$NETWORK_NAME \ --env AG_SETUP_COSMOS_HOME=$SETUP_HOME \ diff --git a/packages/deployment/scripts/integration-test.sh b/packages/deployment/scripts/integration-test.sh index 913d300610f..cdec5352f15 100755 --- a/packages/deployment/scripts/integration-test.sh +++ b/packages/deployment/scripts/integration-test.sh @@ -7,6 +7,16 @@ thisdir=$(cd "$(dirname -- "$real0")" > /dev/null && pwd -P) export GOBIN="$thisdir/../../../golang/cosmos/build" export NETWORK_NAME=${NETWORK_NAME-localtest} +DEFAULT_LOADGEN=/usr/src/testnet-load-generator +LOADGEN=${LOADGEN-""} +if [ -n "$LOADGEN" ]; then + LOADGEN=$(cd "$LOADGEN" > /dev/null && pwd -P) +elif [ -d "$DEFAULT_LOADGEN" ]; then + LOADGEN=$(cd "$DEFAULT_LOADGEN" > /dev/null && pwd -P) +else + LOADGEN= +fi + SOLO_ADDR= VAT_CONFIG= RESULTSDIR=${RESULTSDIR-"$NETWORK_NAME/results"} @@ -23,17 +33,15 @@ export AG_SETUP_COSMOS_HOME=${AG_SETUP_COSMOS_HOME-$PWD} export AG_SETUP_COSMOS_STATE_SYNC_INTERVAL=20 SDK_SRC=${SDK_SRC-$(cd "$thisdir/../../.." 
> /dev/null && pwd -P)} -if [ -d /usr/src/testnet-load-generator ] +if [ -n "$LOADGEN" ] then - solodir=/usr/src/testnet-load-generator/_agstate/agoric-servers/testnet-8000 + solodir="$LOADGEN"/_agstate/agoric-servers/testnet-8000 "$thisdir/../../solo/bin/ag-solo" init "$solodir" --webport=8000 SOLO_ADDR=$(cat "$solodir/ag-cosmos-helper-address") VAT_CONFIG="@agoric/vats/decentral-demo-config.json" fi -# Speed up the docker deployment by pre-mounting /usr/src/agoric-sdk. -DOCKER_VOLUMES="$AGORIC_SDK_PATH:/usr/src/agoric-sdk" \ - "$thisdir/docker-deployment.cjs" > deployment.json +"$thisdir/docker-deployment.cjs" > deployment.json # Set up the network from our above deployment.json. "$thisdir/setup.sh" init --noninteractive @@ -44,16 +52,12 @@ VAULT_FACTORY_CONTROLLER_ADDR="$SOLO_ADDR" \ CHAIN_BOOTSTRAP_VAT_CONFIG="$VAT_CONFIG" \ "$thisdir/setup.sh" bootstrap ${1+"$@"} -loadgen="$PWD/testnet-load-genereator" -if [ ! -d "$loadgen" ]; then - loadgen=/usr/src/testnet-load-generator -fi -if [ -d "$loadgen" ] +if [ -n "$LOADGEN" ] then "$SDK_SRC/packages/deployment/scripts/setup.sh" show-config > "$RESULTSDIR/network-config" cp ag-chain-cosmos/data/genesis.json "$RESULTSDIR/genesis.json" cp "$AG_SETUP_COSMOS_HOME/ag-chain-cosmos/data/genesis.json" "$RESULTSDIR/genesis.json" - cd "$loadgen" + cd "$LOADGEN" SOLO_COINS=40000000000uist \ "$AG_SETUP_COSMOS_HOME/faucet-helper.sh" add-egress loadgen "$SOLO_ADDR" SLOGSENDER=@agoric/telemetry/src/otel-trace.js SOLO_SLOGSENDER="" \ diff --git a/packages/deployment/scripts/setup.sh b/packages/deployment/scripts/setup.sh index 8be0a710a20..8dcb79466ea 100755 --- a/packages/deployment/scripts/setup.sh +++ b/packages/deployment/scripts/setup.sh @@ -11,8 +11,7 @@ export NETWORK_NAME export AG_SETUP_COSMOS_NAME=$NETWORK_NAME export AG_SETUP_COSMOS_HOME=${AG_SETUP_COSMOS_HOME-"$PWD/$NETWORK_NAME/setup"} -# Put our bindir into the PATH so that children can find ag-setup-cosmos. 
+# Put GOBIN into the PATH so that children can find ag-setup-cosmos. export PATH="$thisdir/../bin:${GOBIN-${GOPATH-/usr/local}/bin}:$PATH" -# Run our setup command. exec ag-setup-cosmos ${1+"$@"} diff --git a/packages/deployment/src/init.js b/packages/deployment/src/init.js index 981de84cb7f..bea22fc5ceb 100644 --- a/packages/deployment/src/init.js +++ b/packages/deployment/src/init.js @@ -493,7 +493,7 @@ const doInit = config.OFFSETS[PLACEMENT] = offset; } Object.values(ROLE_INSTANCE).some(i => i > 0) || - Fail`Aborting due to no nodes configured!`; + Fail`Aborting due to no nodes configured! (${ROLE_INSTANCE})`; await wr.createFile( `vars.tf`, diff --git a/packages/deployment/scripts/smoketest-binaries.sh b/scripts/smoketest-binaries.sh similarity index 100% rename from packages/deployment/scripts/smoketest-binaries.sh rename to scripts/smoketest-binaries.sh From 66f7bccce7ce30cf5b9e1e5321710567c05723cb Mon Sep 17 00:00:00 2001 From: Jim Larson Date: Thu, 29 Jun 2023 14:46:01 -0700 Subject: [PATCH 034/109] feat: Cosmos upgrade handler calls swingset --- golang/cosmos/app/app.go | 63 +++++++++++--------- golang/cosmos/app/const.go | 4 +- packages/cosmic-swingset/src/launch-chain.js | 6 ++ packages/internal/src/action-types.js | 1 + 4 files changed, 45 insertions(+), 29 deletions(-) diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index 1f26380e46d..6bcb5d14d6c 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -744,11 +744,11 @@ func NewAgoricApp( app.UpgradeKeeper.SetUpgradeHandler( upgradeName, - upgrade10Handler(app, upgradeName), + upgrade11Handler(app, upgradeName, callToController), ) app.UpgradeKeeper.SetUpgradeHandler( upgradeNameTest, - upgrade10Handler(app, upgradeNameTest), + upgrade11Handler(app, upgradeNameTest, callToController), ) if loadLatest { @@ -771,39 +771,48 @@ func NewAgoricApp( return app } -func upgrade10Handler(app *GaiaApp, targetUpgrade string) func(sdk.Context, upgradetypes.Plan, module.VersionMap) 
(module.VersionMap, error) { - return func(ctx sdk.Context, plan upgradetypes.Plan, fromVm module.VersionMap) (module.VersionMap, error) { - // change bootrap gov parameter to correct vaults parameter - - prevParams := app.SwingSetKeeper.GetParams(ctx) - - ctx.Logger().Info("Pre-upgrade swingset params", "BeansPerUnit", fmt.Sprintf("%v", prevParams.BeansPerUnit), "BootstrapVatConfig", prevParams.BootstrapVatConfig) - - switch targetUpgrade { - case upgradeName: - prevParams.BootstrapVatConfig = "@agoric/vats/decentral-main-vaults-config.json" - case upgradeNameTest: - prevParams.BootstrapVatConfig = "@agoric/vats/decentral-test-vaults-config.json" - default: - return fromVm, fmt.Errorf("invalid upgrade name") - } +// swingsetUpgradeAction is the action to run swingset upgrade handlers. +type swingsetUpgradeAction struct { + // Type must be enactUpgradePlanType + Type string `json:"type"` + // Plan is the upgrade plan + Plan upgradetypes.Plan `json:"plan"` +} - app.SwingSetKeeper.SetParams(ctx, prevParams) - ctx.Logger().Info("Post-upgrade swingset params", "BeansPerUnit", fmt.Sprintf("%v", prevParams.BeansPerUnit), "BootstrapVatConfig", prevParams.BootstrapVatConfig) +const enactUpgradePlanType = "ENACT_UPGRADE_PLAN" - app.VstorageKeeper.MigrateNoDataPlaceholders(ctx) // upgrade-10 only - normalizeModuleAccount(ctx, app.AccountKeeper, vbanktypes.ProvisionPoolName) - normalizeModuleAccount(ctx, app.AccountKeeper, vbanktypes.ReservePoolName) +// swingsetUpgrade tells swingset to execute the upgrade plan. 
+func swingsetUpgrade(ctx sdk.Context, plan upgradetypes.Plan, callToController func(ctx sdk.Context, str string) (string, error)) error { + action := swingsetUpgradeAction{ + Type: enactUpgradePlanType, + Plan: plan, + } + bz, err := json.Marshal(action) + if err != nil { + return err + } + _, err = callToController(ctx, string(bz)) + if err != nil { + return err + } + return nil +} +// upgrade11Handler performs standard upgrade actions plus custom actions for upgrade-11. +func upgrade11Handler(app *GaiaApp, targetUpgrade string, callToController func(ctx sdk.Context, str string) (string, error)) func(sdk.Context, upgradetypes.Plan, module.VersionMap) (module.VersionMap, error) { + return func(ctx sdk.Context, plan upgradetypes.Plan, fromVm module.VersionMap) (module.VersionMap, error) { + // Always run module migrations mvm, err := app.mm.RunMigrations(ctx, app.configurator, fromVm) if err != nil { return mvm, err } - // Just run the SwingSet kernel to finish bootstrap and get ready to open for - // business. 
- stdlog.Println("Rebooting SwingSet") - return mvm, swingset.BootSwingset(ctx, app.SwingSetKeeper) + // Lastly, let Swingset reaction to the upgrade + err = swingsetUpgrade(ctx, plan, callToController) + if err != nil { + return mvm, err + } + return mvm, nil } } diff --git a/golang/cosmos/app/const.go b/golang/cosmos/app/const.go index 15dd38b8b70..2e7f4a2f391 100644 --- a/golang/cosmos/app/const.go +++ b/golang/cosmos/app/const.go @@ -1,6 +1,6 @@ package gaia const ( - upgradeName = "agoric-upgrade-10" - upgradeNameTest = "agorictest-upgrade-10" + upgradeName = "agoric-upgrade-11" + upgradeNameTest = "agorictest-upgrade-11" ) diff --git a/packages/cosmic-swingset/src/launch-chain.js b/packages/cosmic-swingset/src/launch-chain.js index 0699c39bba7..95636eb0f5e 100644 --- a/packages/cosmic-swingset/src/launch-chain.js +++ b/packages/cosmic-swingset/src/launch-chain.js @@ -843,6 +843,12 @@ export async function launch({ return undefined; } + case ActionType.ENACT_UPGRADE_PLAN: { + // Dispatch to upgrade-specific handler + // const { plan } = action; + // const { name, height, info } = plan; + return undefined; + } default: { throw Fail`Unrecognized action ${action}; are you sure you didn't mean to queue it?`; diff --git a/packages/internal/src/action-types.js b/packages/internal/src/action-types.js index 2d5a3d77ce2..e5ed9a05dcf 100644 --- a/packages/internal/src/action-types.js +++ b/packages/internal/src/action-types.js @@ -15,3 +15,4 @@ export const VBANK_BALANCE_UPDATE = 'VBANK_BALANCE_UPDATE'; export const WALLET_ACTION = 'WALLET_ACTION'; export const WALLET_SPEND_ACTION = 'WALLET_SPEND_ACTION'; export const INSTALL_BUNDLE = 'INSTALL_BUNDLE'; +export const ENACT_UPGRADE_PLAN = 'ENACT_UPGRADE_PLAN'; From 9ce286ef6bff631a337cc364461c604f47bf2d2a Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Wed, 12 Jul 2023 15:42:33 +0000 Subject: [PATCH 035/109] chore(x/vstorage): remove nodata migration logic --- golang/cosmos/x/vstorage/keeper/keeper.go | 24 ----------- 
.../cosmos/x/vstorage/keeper/keeper_test.go | 41 ------------------- 2 files changed, 65 deletions(-) diff --git a/golang/cosmos/x/vstorage/keeper/keeper.go b/golang/cosmos/x/vstorage/keeper/keeper.go index ea6eca0797e..9a91e793b10 100644 --- a/golang/cosmos/x/vstorage/keeper/keeper.go +++ b/golang/cosmos/x/vstorage/keeper/keeper.go @@ -181,30 +181,6 @@ func (k Keeper) ImportStorage(ctx sdk.Context, entries []*types.DataEntry) { } } -func (k Keeper) MigrateNoDataPlaceholders(ctx sdk.Context) { - store := ctx.KVStore(k.storeKey) - - iterator := sdk.KVStorePrefixIterator(store, nil) - - // Copy empty keys first since cosmos stores do not support writing keys - // while an iterator is open over the domain - emptyKeys := [][]byte{} - for ; iterator.Valid(); iterator.Next() { - rawValue := iterator.Value() - if bytes.Equal(rawValue, types.EncodedDataPrefix) { - key := iterator.Key() - clonedKey := make([]byte, len(key)) - copy(clonedKey, key) - emptyKeys = append(emptyKeys, clonedKey) - } - } - iterator.Close() - - for _, key := range emptyKeys { - store.Set(key, types.EncodedNoDataValue) - } -} - func (k Keeper) EmitChange(ctx sdk.Context, change *ProposedChange) { if change.NewValue == change.ValueFromLastBlock { // No change. 
diff --git a/golang/cosmos/x/vstorage/keeper/keeper_test.go b/golang/cosmos/x/vstorage/keeper/keeper_test.go index d53c67ee842..1a2ce5d58ee 100644 --- a/golang/cosmos/x/vstorage/keeper/keeper_test.go +++ b/golang/cosmos/x/vstorage/keeper/keeper_test.go @@ -273,44 +273,3 @@ func TestStorageNotify(t *testing.T) { t.Errorf("got after second flush events %#v, want %#v", got, expectedAfterFlushEvents) } } - -func TestStorageMigrate(t *testing.T) { - testKit := makeTestKit() - ctx, keeper := testKit.ctx, testKit.vstorageKeeper - - // Simulate a pre-migration storage with empty string as placeholders - keeper.SetStorage(ctx, types.NewStorageEntry("key1", "value1")) - keeper.SetStorage(ctx, types.NewStorageEntry("key1.child1.grandchild1", "value1grandchild")) - keeper.SetStorage(ctx, types.NewStorageEntry("key1.child1", "")) - - // Do a deep set. - keeper.SetStorage(ctx, types.NewStorageEntry("key2.child2.grandchild2", "value2grandchild")) - keeper.SetStorage(ctx, types.NewStorageEntry("key2.child2.grandchild2a", "value2grandchilda")) - keeper.SetStorage(ctx, types.NewStorageEntry("key2.child2", "")) - keeper.SetStorage(ctx, types.NewStorageEntry("key2", "")) - - keeper.MigrateNoDataPlaceholders(ctx) - - if keeper.HasStorage(ctx, "key1.child1") { - t.Errorf("has key1.child1, want no value") - } - if keeper.HasStorage(ctx, "key2.child2") { - t.Errorf("has key2.child2, want no value") - } - if keeper.HasStorage(ctx, "key2") { - t.Errorf("has key2, want no value") - } - - // Check the export. 
- expectedExport := []*types.DataEntry{ - {Path: "key1", Value: "value1"}, - {Path: "key1.child1.grandchild1", Value: "value1grandchild"}, - {Path: "key2.child2.grandchild2", Value: "value2grandchild"}, - {Path: "key2.child2.grandchild2a", Value: "value2grandchilda"}, - } - got := keeper.ExportStorage(ctx) - if !reflect.DeepEqual(got, expectedExport) { - t.Errorf("got export %q, want %q", got, expectedExport) - } - keeper.ImportStorage(ctx, got) -} From 452bdb8201841aeb3183d25be7bc760b330daf77 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Mon, 3 Jul 2023 14:07:42 -0600 Subject: [PATCH 036/109] feat(deployment): `docker/integration-test.sh` for nested execution --- packages/deployment/docker/ag-setup-cosmos | 4 +- .../deployment/docker/integration-test.sh | 38 +++++++++++++++++++ 2 files changed, 41 insertions(+), 1 deletion(-) create mode 100755 packages/deployment/docker/integration-test.sh diff --git a/packages/deployment/docker/ag-setup-cosmos b/packages/deployment/docker/ag-setup-cosmos index 79e380b19a6..7404290420e 100755 --- a/packages/deployment/docker/ag-setup-cosmos +++ b/packages/deployment/docker/ag-setup-cosmos @@ -21,7 +21,9 @@ show-*) ;; esac setup_volume= -if test -f deployment.json; then +if test -f "$PWD/$SETUP_HOME/setup/deployment.json"; then + setup_volume=--volume="$PWD/$SETUP_HOME/setup:/data/chains/$SETUP_HOME" +elif test -f deployment.json; then setup_volume=--volume="$PWD:/data/chains/$SETUP_HOME" fi exec docker run --rm $TTY $FLAGS \ diff --git a/packages/deployment/docker/integration-test.sh b/packages/deployment/docker/integration-test.sh new file mode 100755 index 00000000000..663eb9a4972 --- /dev/null +++ b/packages/deployment/docker/integration-test.sh @@ -0,0 +1,38 @@ +#! 
/bin/sh +NETWORK_NAME=${NETWORK_NAME-localtest} +SETUP_HOME=${SETUP_HOME-$NETWORK_NAME} +IMAGE=ghcr.io/agoric/cosmic-swingset-setup:${TAG-latest} +TTY=-i +test -t 0 && test -t 1 && TTY=-it +FLAGS=--entrypoint=/bin/bash +case "$1" in +--pull) + shift + docker pull "$IMAGE" + ;; +esac +case "$1" in +shell) shift ;; +*) + set /usr/src/agoric-sdk/packages/deployment/scripts/integration-test.sh ${1+"$@"} + ;; +esac + +setup_volume= +if test -f "$PWD/$SETUP_HOME/setup/deployment.json"; then + setup_volume=--volume="$PWD/$SETUP_HOME/setup:/data/chains/$SETUP_HOME" +elif test -f deployment.json; then + setup_volume=--volume="$PWD:/data/chains/$SETUP_HOME" +fi +if test -n "$LOADGEN"; then + setup_volume="$setup_volume --volume=$LOADGEN:/usr/src/testnet-load-generator" +fi +exec docker run --rm $TTY $FLAGS \ + --volume=ag-setup-cosmos-chains:/data/chains \ + --volume=ag-chain-cosmos-state:/root/.ag-chain-cosmos \ + --volume=/var/run/docker.sock:/var/run/docker.sock \ + $setup_volume \ + --env AGD_HOME=/root/.ag-chain-cosmos \ + --env NETWORK_NAME=$NETWORK_NAME \ + -w /data/chains \ + "$IMAGE" ${1+"$@"} From eeb4f4a50932963e3e41d31d9b21ca4be5fb3b4d Mon Sep 17 00:00:00 2001 From: Jim Larson Date: Mon, 10 Jul 2023 23:41:50 -0700 Subject: [PATCH 037/109] fix: test upgrade to one-past-vaults --- packages/deployment/upgrade-test/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/deployment/upgrade-test/Dockerfile b/packages/deployment/upgrade-test/Dockerfile index ffe27896c4c..b00ccf38aac 100644 --- a/packages/deployment/upgrade-test/Dockerfile +++ b/packages/deployment/upgrade-test/Dockerfile @@ -59,7 +59,7 @@ ARG DEST_IMAGE #this is agoric-upgrade-10 / vaults FROM ghcr.io/agoric/agoric-sdk:34 as agoric-upgrade-10 ARG BOOTSTRAP_MODE -ENV THIS_NAME=agoric-upgrade-10 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} +ENV THIS_NAME=agoric-upgrade-10 UPGRADE_TO=agoric-upgrade-11 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} WORKDIR /usr/src/agoric-sdk/ COPY ./bash_entrypoint.sh 
./env_setup.sh ./start_to_to.sh ./upgrade-test-scripts/ From cea76063871123c26e20a487f3a11d44cd04c20b Mon Sep 17 00:00:00 2001 From: Raphael Salas Date: Fri, 30 Jun 2023 19:34:09 +0000 Subject: [PATCH 038/109] upgrade-test: do not overwrite DEST_IMAGE when set --- packages/deployment/upgrade-test/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/deployment/upgrade-test/Makefile b/packages/deployment/upgrade-test/Makefile index 0f70f0fa9ff..f5a55e66ba0 100644 --- a/packages/deployment/upgrade-test/Makefile +++ b/packages/deployment/upgrade-test/Makefile @@ -1,6 +1,6 @@ REPOSITORY = agoric/upgrade-test # use :dev (latest prerelease image) unless we build local sdk -DEST_IMAGE = $(if $(findstring local_sdk,$(MAKECMDGOALS)),ghcr.io/agoric/agoric-sdk:latest,ghcr.io/agoric/agoric-sdk:dev) +DEST_IMAGE ?= $(if $(findstring local_sdk,$(MAKECMDGOALS)),ghcr.io/agoric/agoric-sdk:latest,ghcr.io/agoric/agoric-sdk:dev) BOOTSTRAP_MODE?=main TARGET?=agoric-upgrade-11 dockerLabel?=$(TARGET) From 3ce4012ea99b39b2d6bfd422b1d7ea7a7e904568 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 13 Jul 2023 20:37:32 +0000 Subject: [PATCH 039/109] fix(cosmos): module order independent init and bootstrap --- golang/cosmos/app/app.go | 83 +++++++++++++++++++++++------ golang/cosmos/x/swingset/genesis.go | 36 ++----------- golang/cosmos/x/swingset/module.go | 16 ++++-- 3 files changed, 83 insertions(+), 52 deletions(-) diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index 823074aa5e7..5951dad0a6f 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -8,6 +8,7 @@ import ( "net/http" "os" "path/filepath" + "runtime/debug" "time" "github.com/cosmos/cosmos-sdk/baseapp" @@ -21,6 +22,7 @@ import ( servertypes "github.com/cosmos/cosmos-sdk/server/types" "github.com/cosmos/cosmos-sdk/simapp" sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/errors" "github.com/cosmos/cosmos-sdk/types/module" 
"github.com/cosmos/cosmos-sdk/version" "github.com/cosmos/cosmos-sdk/x/auth" @@ -196,6 +198,7 @@ type GaiaApp struct { // nolint: golint interfaceRegistry types.InterfaceRegistry controllerInited bool + bootstrapNeeded bool lienPort int vbankPort int vibcPort int @@ -431,12 +434,16 @@ func NewAgoricApp( // This function is tricky to get right, so we build it ourselves. callToController := func(ctx sdk.Context, str string) (string, error) { + app.CheckControllerInited(true) // We use SwingSet-level metering to charge the user for the call. - app.MustInitController(ctx) defer vm.SetControllerContext(ctx)() return sendToController(true, str) } + setBootstrapNeeded := func() { + app.bootstrapNeeded = true + } + app.VstorageKeeper = vstorage.NewKeeper( keys[vstorage.StoreKey], ) @@ -581,7 +588,7 @@ func NewAgoricApp( transferModule, icaModule, vstorage.NewAppModule(app.VstorageKeeper), - swingset.NewAppModule(app.SwingSetKeeper), + swingset.NewAppModule(app.SwingSetKeeper, setBootstrapNeeded), vibcModule, vbankModule, lienModule, @@ -808,6 +815,7 @@ func normalizeModuleAccount(ctx sdk.Context, ak authkeeper.AccountKeeper, name s type cosmosInitAction struct { Type string `json:"type"` ChainID string `json:"chainID"` + IsBootstrap bool `json:"isBootstrap"` Params swingset.Params `json:"params"` SupplyCoins sdk.Coins `json:"supplyCoins"` UpgradePlan *upgradetypes.Plan `json:"upgradePlan,omitempty"` @@ -820,15 +828,25 @@ type cosmosInitAction struct { // Name returns the name of the App func (app *GaiaApp) Name() string { return app.BaseApp.Name() } -func (app *GaiaApp) MustInitController(ctx sdk.Context) { - if app.controllerInited { - return +// CheckControllerInited exits if the controller initialization state does not match `expected`. 
+func (app *GaiaApp) CheckControllerInited(expected bool) { + if app.controllerInited != expected { + fmt.Fprintf(os.Stderr, "controllerInited != %t\n", expected) + debug.PrintStack() + os.Exit(1) } +} + +// initController sends the initialization message to the VM. +// Exits if the controller has already been initialized. +func (app *GaiaApp) initController(ctx sdk.Context, bootstrap bool) { + app.CheckControllerInited(false) app.controllerInited = true // Begin initializing the controller here. action := &cosmosInitAction{ Type: "AG_COSMOS_INIT", ChainID: ctx.ChainID(), + IsBootstrap: bootstrap, Params: app.SwingSetKeeper.GetParams(ctx), SupplyCoins: sdk.NewCoins(app.BankKeeper.GetSupply(ctx, "uist")), UpgradePlan: app.upgradePlan, @@ -837,28 +855,51 @@ func (app *GaiaApp) MustInitController(ctx sdk.Context) { VbankPort: app.vbankPort, VibcPort: app.vibcPort, } + // This really abuses `BlockingSend` to get back at `sendToController` out, err := app.SwingSetKeeper.BlockingSend(ctx, action) // fmt.Fprintf(os.Stderr, "AG_COSMOS_INIT Returned from SwingSet: %s, %v\n", out, err) if err != nil { - fmt.Fprintln(os.Stderr, "Cannot initialize Controller", err) - os.Exit(1) + panic(errors.Wrap(err, "cannot initialize Controller")) } var res bool err = json.Unmarshal([]byte(out), &res) if err != nil { - fmt.Fprintln(os.Stderr, "Cannot unmarshal Controller init response", out, err) - os.Exit(1) + panic(errors.Wrapf(err, "cannot unmarshal Controller init response: %s", out)) } if !res { - fmt.Fprintln(os.Stderr, "Controller negative init response") - os.Exit(1) + panic(fmt.Errorf("controller negative init response")) + } +} + +type bootstrapBlockAction struct { + Type string `json:"type"` + BlockTime int64 `json:"blockTime"` +} + +// BootstrapController initializes the controller (with the bootstrap flag) and sends a bootstrap action. 
+func (app *GaiaApp) BootstrapController(ctx sdk.Context) error { + app.initController(ctx, true) + + stdlog.Println("Running SwingSet until bootstrap is ready") + // Just run the SwingSet kernel to finish bootstrap and get ready to open for + // business. + action := &bootstrapBlockAction{ + Type: "BOOTSTRAP_BLOCK", + BlockTime: ctx.BlockTime().Unix(), } + + _, err := app.SwingSetKeeper.BlockingSend(ctx, action) + return err } // BeginBlocker application updates every begin block func (app *GaiaApp) BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock { + if !app.controllerInited { + app.initController(ctx, false) + } + return app.mm.BeginBlock(ctx, req) } @@ -877,6 +918,21 @@ func (app *GaiaApp) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci app.UpgradeKeeper.SetModuleVersionMap(ctx, app.mm.GetVersionMap()) res := app.mm.InitGenesis(ctx, app.appCodec, genesisState) + // initialize the provision and reserve module accounts, to avoid their implicit creation + // as a default account upon receiving a transfer. See BlockedAddrs(). + normalizeModuleAccount(ctx, app.AccountKeeper, vbanktypes.ProvisionPoolName) + normalizeModuleAccount(ctx, app.AccountKeeper, vbanktypes.ReservePoolName) + + if app.bootstrapNeeded { + err := app.BootstrapController(ctx) + // fmt.Fprintf(os.Stderr, "BOOTSTRAP_BLOCK Returned from swingset: %s, %v\n", out, err) + if err != nil { + // NOTE: A failed BOOTSTRAP_BLOCK means that the SwingSet state is inconsistent. + // Panic here, in the hopes that a replay from scratch will fix the problem. + panic(err) + } + } + // Agoric: report the genesis time explicitly. 
genTime := req.GetTime() if genTime.After(time.Now()) { @@ -884,11 +940,6 @@ func (app *GaiaApp) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci stdlog.Printf("Genesis time %s is in %s\n", genTime, d) } - // initialize the provision and reserve module accounts, to avoid their implicit creation - // as a default account upon receiving a transfer. See BockedAddrs(). - normalizeModuleAccount(ctx, app.AccountKeeper, vbanktypes.ProvisionPoolName) - normalizeModuleAccount(ctx, app.AccountKeeper, vbanktypes.ReservePoolName) - return res } diff --git a/golang/cosmos/x/swingset/genesis.go b/golang/cosmos/x/swingset/genesis.go index ccdfd9468da..59a2725a263 100644 --- a/golang/cosmos/x/swingset/genesis.go +++ b/golang/cosmos/x/swingset/genesis.go @@ -3,11 +3,9 @@ package swingset import ( // "os" "fmt" - stdlog "log" "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types" sdk "github.com/cosmos/cosmos-sdk/types" - abci "github.com/tendermint/tendermint/abci/types" ) func NewGenesisState() *types.GenesisState { @@ -30,38 +28,14 @@ func DefaultGenesisState() *types.GenesisState { } } -type bootstrapBlockAction struct { - Type string `json:"type"` - BlockTime int64 `json:"blockTime"` -} - -func BootSwingset(ctx sdk.Context, keeper Keeper) error { - // Just run the SwingSet kernel to finish bootstrap and get ready to open for - // business. - action := &bootstrapBlockAction{ - Type: "BOOTSTRAP_BLOCK", - BlockTime: ctx.BlockTime().Unix(), - } - - _, err := keeper.BlockingSend(ctx, action) - return err -} - -func InitGenesis(ctx sdk.Context, keeper Keeper, data *types.GenesisState) []abci.ValidatorUpdate { +// InitGenesis initializes the (Cosmos-side) SwingSet state from the GenesisState. +// Returns whether the app should send a bootstrap action to the controller. 
+func InitGenesis(ctx sdk.Context, keeper Keeper, data *types.GenesisState) bool { keeper.SetParams(ctx, data.GetParams()) keeper.SetState(ctx, data.GetState()) - stdlog.Println("Running SwingSet until bootstrap is ready") - err := BootSwingset(ctx, keeper) - - // fmt.Fprintf(os.Stderr, "BOOTSTRAP_BLOCK Returned from swingset: %s, %v\n", out, err) - if err != nil { - // NOTE: A failed BOOTSTRAP_BLOCK means that the SwingSet state is inconsistent. - // Panic here, in the hopes that a replay from scratch will fix the problem. - panic(err) - } - - return []abci.ValidatorUpdate{} + // TODO: bootstrap only if not restoring swing-store from genesis state + return true } func ExportGenesis(ctx sdk.Context, k Keeper) *types.GenesisState { diff --git a/golang/cosmos/x/swingset/module.go b/golang/cosmos/x/swingset/module.go index 8e3f030ccf9..9e38f05e864 100644 --- a/golang/cosmos/x/swingset/module.go +++ b/golang/cosmos/x/swingset/module.go @@ -80,14 +80,16 @@ func (AppModuleBasic) GetTxCmd() *cobra.Command { type AppModule struct { AppModuleBasic - keeper Keeper + keeper Keeper + setBootstrapNeeded func() } // NewAppModule creates a new AppModule Object -func NewAppModule(k Keeper) AppModule { +func NewAppModule(k Keeper, setBootstrapNeeded func()) AppModule { am := AppModule{ - AppModuleBasic: AppModuleBasic{}, - keeper: k, + AppModuleBasic: AppModuleBasic{}, + keeper: k, + setBootstrapNeeded: setBootstrapNeeded, } return am } @@ -147,7 +149,11 @@ func (am AppModule) EndBlock(ctx sdk.Context, req abci.RequestEndBlock) []abci.V func (am AppModule) InitGenesis(ctx sdk.Context, cdc codec.JSONCodec, data json.RawMessage) []abci.ValidatorUpdate { var genesisState types.GenesisState cdc.MustUnmarshalJSON(data, &genesisState) - return InitGenesis(ctx, am.keeper, &genesisState) + bootstrapNeeded := InitGenesis(ctx, am.keeper, &genesisState) + if bootstrapNeeded { + am.setBootstrapNeeded() + } + return []abci.ValidatorUpdate{} } func (am AppModule) ExportGenesis(ctx sdk.Context, 
cdc codec.JSONCodec) json.RawMessage { From 4f2fc0f41dd7e2076d9e885184c54036992cc158 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Tue, 18 Jul 2023 18:02:41 -0600 Subject: [PATCH 040/109] fix(deployment-integration): reenable DOCKER_VOLUMES --- .github/workflows/deployment-test.yml | 5 ++++- scripts/run-deployment-integration.sh | 3 ++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/deployment-test.yml b/.github/workflows/deployment-test.yml index 794b82dfe74..3aecefc2d6e 100644 --- a/.github/workflows/deployment-test.yml +++ b/.github/workflows/deployment-test.yml @@ -69,7 +69,10 @@ jobs: set -e cd packages/cosmic-swingset make install - - run: packages/deployment/scripts/integration-test.sh + - run: | + set -xe + DOCKER_VOLUMES="$PWD:/usr/src/agoric-sdk" \ + packages/deployment/scripts/integration-test.sh timeout-minutes: 90 env: NETWORK_NAME: chaintest diff --git a/scripts/run-deployment-integration.sh b/scripts/run-deployment-integration.sh index 04b5b9c5e95..66a8c9ecb70 100644 --- a/scripts/run-deployment-integration.sh +++ b/scripts/run-deployment-integration.sh @@ -19,7 +19,8 @@ sudo ./packages/deployment/scripts/install-deps.sh yarn install && XSNAP_RANDOM_INIT=1 yarn build && make -C packages/cosmic-swingset/ # change to "false" to skip extraction on success like in CI testfailure="unknown" -/usr/src/agoric-sdk/packages/deployment/scripts/integration-test.sh || { +DOCKER_VOLUMES="$AGORIC_SDK_PATH:/usr/src/agoric-sdk" \ +packages/deployment/scripts/integration-test.sh || { echo "Test failed!!!" 
testfailure="true" } From 958c3eefecc67c673672c3e7c067c2de9aaf0087 Mon Sep 17 00:00:00 2001 From: Jim Larson Date: Tue, 11 Jul 2023 00:08:30 -0700 Subject: [PATCH 041/109] refactor: put upgrade plan in cosmos init action --- golang/cosmos/app/app.go | 55 ++++++-------------- packages/cosmic-swingset/src/launch-chain.js | 6 --- packages/internal/src/action-types.js | 1 - 3 files changed, 15 insertions(+), 47 deletions(-) diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index 6bcb5d14d6c..c646bb5ae63 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -201,6 +201,8 @@ type GaiaApp struct { // nolint: golint vibcPort int vstoragePort int + upgradePlan *upgradetypes.Plan + invCheckPeriod uint // keys to access the substores @@ -771,47 +773,18 @@ func NewAgoricApp( return app } -// swingsetUpgradeAction is the action to run swingset upgrade handlers. -type swingsetUpgradeAction struct { - // Type must be enactUpgradePlanType - Type string `json:"type"` - // Plan is the upgrade plan - Plan upgradetypes.Plan `json:"plan"` -} - -const enactUpgradePlanType = "ENACT_UPGRADE_PLAN" - -// swingsetUpgrade tells swingset to execute the upgrade plan. -func swingsetUpgrade(ctx sdk.Context, plan upgradetypes.Plan, callToController func(ctx sdk.Context, str string) (string, error)) error { - action := swingsetUpgradeAction{ - Type: enactUpgradePlanType, - Plan: plan, - } - bz, err := json.Marshal(action) - if err != nil { - return err - } - _, err = callToController(ctx, string(bz)) - if err != nil { - return err - } - return nil -} - // upgrade11Handler performs standard upgrade actions plus custom actions for upgrade-11. 
func upgrade11Handler(app *GaiaApp, targetUpgrade string, callToController func(ctx sdk.Context, str string) (string, error)) func(sdk.Context, upgradetypes.Plan, module.VersionMap) (module.VersionMap, error) { return func(ctx sdk.Context, plan upgradetypes.Plan, fromVm module.VersionMap) (module.VersionMap, error) { + // Record the plan to send to SwingSet + app.upgradePlan = &plan + // Always run module migrations mvm, err := app.mm.RunMigrations(ctx, app.configurator, fromVm) if err != nil { return mvm, err } - // Lastly, let Swingset reaction to the upgrade - err = swingsetUpgrade(ctx, plan, callToController) - if err != nil { - return mvm, err - } return mvm, nil } } @@ -834,14 +807,15 @@ func normalizeModuleAccount(ctx sdk.Context, ak authkeeper.AccountKeeper, name s } type cosmosInitAction struct { - Type string `json:"type"` - ChainID string `json:"chainID"` - Params swingset.Params `json:"params"` - StoragePort int `json:"storagePort"` - SupplyCoins sdk.Coins `json:"supplyCoins"` - VibcPort int `json:"vibcPort"` - VbankPort int `json:"vbankPort"` - LienPort int `json:"lienPort"` + Type string `json:"type"` + ChainID string `json:"chainID"` + Params swingset.Params `json:"params"` + StoragePort int `json:"storagePort"` + SupplyCoins sdk.Coins `json:"supplyCoins"` + VibcPort int `json:"vibcPort"` + VbankPort int `json:"vbankPort"` + LienPort int `json:"lienPort"` + UpgradePlan *upgradetypes.Plan `json:"upgradePlan,omitempty"` } // Name returns the name of the App @@ -862,6 +836,7 @@ func (app *GaiaApp) MustInitController(ctx sdk.Context) { VibcPort: app.vibcPort, VbankPort: app.vbankPort, LienPort: app.lienPort, + UpgradePlan: app.upgradePlan, } out, err := app.SwingSetKeeper.BlockingSend(ctx, action) diff --git a/packages/cosmic-swingset/src/launch-chain.js b/packages/cosmic-swingset/src/launch-chain.js index 95636eb0f5e..0699c39bba7 100644 --- a/packages/cosmic-swingset/src/launch-chain.js +++ b/packages/cosmic-swingset/src/launch-chain.js @@ -843,12 
+843,6 @@ export async function launch({ return undefined; } - case ActionType.ENACT_UPGRADE_PLAN: { - // Dispatch to upgrade-specific handler - // const { plan } = action; - // const { name, height, info } = plan; - return undefined; - } default: { throw Fail`Unrecognized action ${action}; are you sure you didn't mean to queue it?`; diff --git a/packages/internal/src/action-types.js b/packages/internal/src/action-types.js index e5ed9a05dcf..2d5a3d77ce2 100644 --- a/packages/internal/src/action-types.js +++ b/packages/internal/src/action-types.js @@ -15,4 +15,3 @@ export const VBANK_BALANCE_UPDATE = 'VBANK_BALANCE_UPDATE'; export const WALLET_ACTION = 'WALLET_ACTION'; export const WALLET_SPEND_ACTION = 'WALLET_SPEND_ACTION'; export const INSTALL_BUNDLE = 'INSTALL_BUNDLE'; -export const ENACT_UPGRADE_PLAN = 'ENACT_UPGRADE_PLAN'; From 475708e63bb95d75184072547ca92586a978c5a0 Mon Sep 17 00:00:00 2001 From: Richard Gibson Date: Mon, 10 Jul 2023 12:56:39 -0400 Subject: [PATCH 042/109] fix(cosmos): Support building on Linux aarch64 --- golang/cosmos/Makefile | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/golang/cosmos/Makefile b/golang/cosmos/Makefile index a5c9da82b07..02696a0284b 100644 --- a/golang/cosmos/Makefile +++ b/golang/cosmos/Makefile @@ -133,7 +133,11 @@ BUF_VERSION ?= 0.56.0 PROTOC_VERSION ?= 3.11.2 ifeq ($(UNAME_S),Linux) - PROTOC_ZIP ?= protoc-${PROTOC_VERSION}-linux-x86_64.zip + ifeq ($(UNAME_M),aarch64) + PROTOC_ZIP ?= protoc-${PROTOC_VERSION}-linux-aarch_64.zip + else + PROTOC_ZIP ?= protoc-${PROTOC_VERSION}-linux-x86_64.zip + endif endif ifeq ($(UNAME_S),Darwin) PROTOC_ZIP ?= protoc-${PROTOC_VERSION}-osx-x86_64.zip From 0b1a85b1bdff2f9001a712178f9d81fad559808e Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Sun, 23 Jul 2023 17:05:23 +0000 Subject: [PATCH 043/109] fix(ci): checkout agoric-sdk in sub folder Restore loadgen in deployment test --- .github/actions/restore-golang/action.yml | 9 +++++++-- 
.github/workflows/deployment-test.yml | 20 ++++++++++++++------ 2 files changed, 21 insertions(+), 8 deletions(-) diff --git a/.github/actions/restore-golang/action.yml b/.github/actions/restore-golang/action.yml index 4fc1ce9f4fd..6fd719e84f0 100644 --- a/.github/actions/restore-golang/action.yml +++ b/.github/actions/restore-golang/action.yml @@ -5,6 +5,10 @@ inputs: go-version: description: 'The version of Go to use' required: true + path: + description: 'The relative path to the agoric-sdk directory' + required: false + default: '.' runs: using: composite @@ -14,11 +18,12 @@ runs: shell: bash - uses: actions/checkout@v3 with: + path: ${{ inputs.path }} clean: 'false' submodules: 'true' - uses: actions/setup-go@v4 with: - cache-dependency-path: golang/cosmos/go.sum + cache-dependency-path: ${{ inputs.path }}/golang/cosmos/go.sum go-version: ${{ inputs.go-version }} - uses: kenchan0130/actions-system-info@master id: system-info @@ -31,7 +36,7 @@ runs: restore-keys: | ${{ runner.os }}-${{ runner.arch }}-${{ steps.system-info.outputs.release }}-go-${{ inputs.go-version }}-built- - name: go mod download - working-directory: ./golang/cosmos + working-directory: ${{ inputs.path }}/golang/cosmos run: go mod download shell: bash if: steps.cache.outputs.cache-hit != 'true' diff --git a/.github/workflows/deployment-test.yml b/.github/workflows/deployment-test.yml index 3aecefc2d6e..1675e48cb50 100644 --- a/.github/workflows/deployment-test.yml +++ b/.github/workflows/deployment-test.yml @@ -25,13 +25,17 @@ jobs: - uses: actions/checkout@v3 with: submodules: 'true' + path: ./agoric-sdk - run: sudo packages/deployment/scripts/install-deps.sh - - uses: ./.github/actions/restore-golang + working-directory: ./agoric-sdk + - uses: ./agoric-sdk/.github/actions/restore-golang with: go-version: '1.20' - - uses: ./.github/actions/restore-node + path: ./agoric-sdk + - uses: ./agoric-sdk/.github/actions/restore-node with: node-version: 18.x + path: ./agoric-sdk # Forces xsnap to 
initialize all memory to random data, which increases # the chances the content of snapshots may deviate between validators xsnap-random-init: '1' @@ -61,15 +65,18 @@ jobs: uses: actions/checkout@v3 with: repository: Agoric/testnet-load-generator - path: testnet-load-generator + path: ./testnet-load-generator ref: ${{steps.get-loadgen-branch.outputs.result}} - name: Build cosmic-swingset dependencies + working-directory: ./agoric-sdk run: | set -e cd packages/cosmic-swingset make install - - run: | + - name: Run integration test + working-directory: ./agoric-sdk + run: | set -xe DOCKER_VOLUMES="$PWD:/usr/src/agoric-sdk" \ packages/deployment/scripts/integration-test.sh @@ -78,6 +85,7 @@ jobs: NETWORK_NAME: chaintest - name: capture results if: always() + working-directory: ./agoric-sdk run: | NOW=$(date -u +%Y%m%dT%H%M%S) echo "NOW=$NOW" >> "$GITHUB_ENV" @@ -96,11 +104,11 @@ jobs: if: always() with: name: deployment-test-results-${{ env.NOW }} - path: chaintest/results + path: ./agoric-sdk/chaintest/results - name: notify on failure if: failure() && github.event_name != 'pull_request' - uses: ./.github/actions/notify-status + uses: ./agoric-sdk/.github/actions/notify-status with: webhook: ${{ secrets.SLACK_WEBHOOK_URL }} from: ${{ secrets.NOTIFY_EMAIL_FROM }} From e22cfff8d7e619887b9e11e8a6334d7fcc10c22f Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Wed, 12 Jul 2023 16:29:00 +0000 Subject: [PATCH 044/109] chore(cosmic-swingset): check state before snapshot restore --- packages/cosmic-swingset/src/chain-main.js | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/packages/cosmic-swingset/src/chain-main.js b/packages/cosmic-swingset/src/chain-main.js index 46ef7291ef3..4f13859f270 100644 --- a/packages/cosmic-swingset/src/chain-main.js +++ b/packages/cosmic-swingset/src/chain-main.js @@ -493,6 +493,9 @@ export default async function main(progname, args, { env, homedir, agcc }) { }; } + /** @type {Awaited>['blockingSend'] | undefined} */ + 
let blockingSend; + async function handleCosmosSnapshot(blockHeight, request, requestArgs) { switch (request) { case 'restore': { @@ -500,6 +503,9 @@ export default async function main(progname, args, { env, homedir, agcc }) { if (typeof exportDir !== 'string') { throw Fail`Invalid exportDir argument ${q(exportDir)}`; } + !stateSyncExport || + Fail`Snapshot already in progress for ${stateSyncExport.blockHeight}`; + !blockingSend || Fail`Cannot restore snapshot after init`; console.info( 'Restoring SwingSet state from snapshot at block height', blockHeight, @@ -607,9 +613,6 @@ export default async function main(progname, args, { env, homedir, agcc }) { } } - /** @type {Awaited>['blockingSend'] | undefined} */ - let blockingSend; - async function toSwingSet(action, _replier) { // console.log(`toSwingSet`, action); From 0c1b727cc5b1cf25fab8d59274baf420fdc61352 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Wed, 19 Jul 2023 20:28:18 -0600 Subject: [PATCH 045/109] feat(deployment): find testnet-load-generator in parent of agoric-sdk --- packages/deployment/scripts/integration-test.sh | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/deployment/scripts/integration-test.sh b/packages/deployment/scripts/integration-test.sh index cdec5352f15..42554f4c177 100755 --- a/packages/deployment/scripts/integration-test.sh +++ b/packages/deployment/scripts/integration-test.sh @@ -7,10 +7,14 @@ thisdir=$(cd "$(dirname -- "$real0")" > /dev/null && pwd -P) export GOBIN="$thisdir/../../../golang/cosmos/build" export NETWORK_NAME=${NETWORK_NAME-localtest} +SDK_SRC=${SDK_SRC-$(cd "$thisdir/../../.." 
> /dev/null && pwd -P)} + DEFAULT_LOADGEN=/usr/src/testnet-load-generator LOADGEN=${LOADGEN-""} if [ -n "$LOADGEN" ]; then LOADGEN=$(cd "$LOADGEN" > /dev/null && pwd -P) +elif [ -d "$SDK_SRC/../testnet-load-generator" ]; then + LOADGEN=$(cd "$SDK_SRC/../testnet-load-generator" > /dev/null && pwd -P) elif [ -d "$DEFAULT_LOADGEN" ]; then LOADGEN=$(cd "$DEFAULT_LOADGEN" > /dev/null && pwd -P) else @@ -31,7 +35,6 @@ cd "$NETWORK_NAME/setup" export AG_SETUP_COSMOS_HOME=${AG_SETUP_COSMOS_HOME-$PWD} export AG_SETUP_COSMOS_STATE_SYNC_INTERVAL=20 -SDK_SRC=${SDK_SRC-$(cd "$thisdir/../../.." > /dev/null && pwd -P)} if [ -n "$LOADGEN" ] then From 64cb9b2e2fb960f738734ebaff16288897a7b113 Mon Sep 17 00:00:00 2001 From: Jim Larson Date: Tue, 11 Jul 2023 00:12:26 -0700 Subject: [PATCH 046/109] refactor: no longer a need for upgrade function to talk over bridge --- golang/cosmos/app/app.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index c646bb5ae63..9d037a3bf3f 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -746,11 +746,11 @@ func NewAgoricApp( app.UpgradeKeeper.SetUpgradeHandler( upgradeName, - upgrade11Handler(app, upgradeName, callToController), + upgrade11Handler(app, upgradeName), ) app.UpgradeKeeper.SetUpgradeHandler( upgradeNameTest, - upgrade11Handler(app, upgradeNameTest, callToController), + upgrade11Handler(app, upgradeNameTest), ) if loadLatest { @@ -774,7 +774,7 @@ func NewAgoricApp( } // upgrade11Handler performs standard upgrade actions plus custom actions for upgrade-11. 
-func upgrade11Handler(app *GaiaApp, targetUpgrade string, callToController func(ctx sdk.Context, str string) (string, error)) func(sdk.Context, upgradetypes.Plan, module.VersionMap) (module.VersionMap, error) { +func upgrade11Handler(app *GaiaApp, targetUpgrade string) func(sdk.Context, upgradetypes.Plan, module.VersionMap) (module.VersionMap, error) { return func(ctx sdk.Context, plan upgradetypes.Plan, fromVm module.VersionMap) (module.VersionMap, error) { // Record the plan to send to SwingSet app.upgradePlan = &plan From e567c21a224d239c467f740bb937f84b18db9dd7 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 27 Jul 2023 16:48:15 +0000 Subject: [PATCH 047/109] fix(cosmos): don't init controller before upgrade --- golang/cosmos/app/app.go | 67 +++++++++++++++++++----------- golang/cosmos/x/swingset/module.go | 16 ++++--- 2 files changed, 52 insertions(+), 31 deletions(-) diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index 318d3a4add0..3fb2d0b4d41 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -598,7 +598,7 @@ func NewAgoricApp( transferModule, icaModule, vstorage.NewAppModule(app.VstorageKeeper), - swingset.NewAppModule(app.SwingSetKeeper, setBootstrapNeeded), + swingset.NewAppModule(app.SwingSetKeeper, setBootstrapNeeded, app.ensureControllerInited), vibcModule, vbankModule, lienModule, @@ -631,6 +631,8 @@ func NewAgoricApp( paramstypes.ModuleName, vestingtypes.ModuleName, vstorage.ModuleName, + // This will cause the swingset controller to init if it hadn't yet, passing + // any upgrade plan or bootstrap flag when starting at an upgrade height swingset.ModuleName, vibc.ModuleName, vbank.ModuleName, @@ -792,6 +794,7 @@ func NewAgoricApp( // upgrade11Handler performs standard upgrade actions plus custom actions for upgrade-11. 
func upgrade11Handler(app *GaiaApp, targetUpgrade string) func(sdk.Context, upgradetypes.Plan, module.VersionMap) (module.VersionMap, error) { return func(ctx sdk.Context, plan upgradetypes.Plan, fromVm module.VersionMap) (module.VersionMap, error) { + app.CheckControllerInited(false) // Record the plan to send to SwingSet app.upgradePlan = &plan @@ -847,13 +850,21 @@ func (app *GaiaApp) CheckControllerInited(expected bool) { } } +type bootstrapBlockAction struct { + Type string `json:"type"` + BlockTime int64 `json:"blockTime"` +} + // initController sends the initialization message to the VM. // Exits if the controller has already been initialized. +// The init message will contain any upgrade plan if we're starting after an +// upgrade, and a flag indicating whether this is a bootstrap of the controller. func (app *GaiaApp) initController(ctx sdk.Context, bootstrap bool) { app.CheckControllerInited(false) app.controllerInited = true // Begin initializing the controller here. - action := &cosmosInitAction{ + var action vm.Jsonable + action = &cosmosInitAction{ Type: "AG_COSMOS_INIT", ChainID: ctx.ChainID(), IsBootstrap: bootstrap, @@ -881,35 +892,46 @@ func (app *GaiaApp) initController(ctx sdk.Context, bootstrap bool) { if !res { panic(fmt.Errorf("controller negative init response")) } -} -type bootstrapBlockAction struct { - Type string `json:"type"` - BlockTime int64 `json:"blockTime"` -} - -// BootstrapController initializes the controller (with the bootstrap flag) and sends a bootstrap action. -func (app *GaiaApp) BootstrapController(ctx sdk.Context) error { - app.initController(ctx, true) + if !bootstrap { + return + } stdlog.Println("Running SwingSet until bootstrap is ready") // Just run the SwingSet kernel to finish bootstrap and get ready to open for // business. 
- action := &bootstrapBlockAction{ + action = &bootstrapBlockAction{ Type: "BOOTSTRAP_BLOCK", BlockTime: ctx.BlockTime().Unix(), } - _, err := app.SwingSetKeeper.BlockingSend(ctx, action) - return err + _, err = app.SwingSetKeeper.BlockingSend(ctx, action) + // fmt.Fprintf(os.Stderr, "BOOTSTRAP_BLOCK Returned from swingset: %s, %v\n", out, err) + if err != nil { + // NOTE: A failed BOOTSTRAP_BLOCK means that the SwingSet state is inconsistent. + // Panic here, in the hopes that a replay from scratch will fix the problem. + panic(err) + } } -// BeginBlocker application updates every begin block -func (app *GaiaApp) BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock { - if !app.controllerInited { - app.initController(ctx, false) +// ensureControllerInited inits the controller if needed. It's used by the +// x/swingset module's BeginBlock to lazily start the JS controller. +// We cannot init early as we don't know when starting the software if this +// might be a simple restart, or a chain init from genesis or upgrade which +// require the controller to not be inited yet. +func (app *GaiaApp) ensureControllerInited(ctx sdk.Context) { + if app.controllerInited { + return } + // While we don't expect it anymore, some upgrade may want to throw away + // the current JS state and bootstrap again (bulldozer). In that case the + // upgrade handler can just set the bootstrapNeeded flag. 
+ app.initController(ctx, app.bootstrapNeeded) +} + +// BeginBlocker application updates every begin block +func (app *GaiaApp) BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock { return app.mm.BeginBlock(ctx, req) } @@ -933,14 +955,9 @@ func (app *GaiaApp) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci normalizeModuleAccount(ctx, app.AccountKeeper, vbanktypes.ProvisionPoolName) normalizeModuleAccount(ctx, app.AccountKeeper, vbanktypes.ReservePoolName) + // Init early (before first BeginBlock) to run the potentially lengthy bootstrap if app.bootstrapNeeded { - err := app.BootstrapController(ctx) - // fmt.Fprintf(os.Stderr, "BOOTSTRAP_BLOCK Returned from swingset: %s, %v\n", out, err) - if err != nil { - // NOTE: A failed BOOTSTRAP_BLOCK means that the SwingSet state is inconsistent. - // Panic here, in the hopes that a replay from scratch will fix the problem. - panic(err) - } + app.initController(ctx, true) } // Agoric: report the genesis time explicitly. 
diff --git a/golang/cosmos/x/swingset/module.go b/golang/cosmos/x/swingset/module.go index 9e38f05e864..a5f180beba4 100644 --- a/golang/cosmos/x/swingset/module.go +++ b/golang/cosmos/x/swingset/module.go @@ -80,16 +80,18 @@ func (AppModuleBasic) GetTxCmd() *cobra.Command { type AppModule struct { AppModuleBasic - keeper Keeper - setBootstrapNeeded func() + keeper Keeper + setBootstrapNeeded func() + ensureControllerInited func(sdk.Context) } // NewAppModule creates a new AppModule Object -func NewAppModule(k Keeper, setBootstrapNeeded func()) AppModule { +func NewAppModule(k Keeper, setBootstrapNeeded func(), ensureControllerInited func(sdk.Context)) AppModule { am := AppModule{ - AppModuleBasic: AppModuleBasic{}, - keeper: k, - setBootstrapNeeded: setBootstrapNeeded, + AppModuleBasic: AppModuleBasic{}, + keeper: k, + setBootstrapNeeded: setBootstrapNeeded, + ensureControllerInited: ensureControllerInited, } return am } @@ -127,6 +129,8 @@ func (am AppModule) RegisterServices(cfg module.Configurator) { func (AppModule) ConsensusVersion() uint64 { return 2 } func (am AppModule) BeginBlock(ctx sdk.Context, req abci.RequestBeginBlock) { + am.ensureControllerInited(ctx) + err := BeginBlock(ctx, req, am.keeper) if err != nil { fmt.Println("BeginBlock error:", err) From 88889a6ab980b6aec9e807a80be17c42b3cbc368 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Sun, 23 Jul 2023 18:07:00 +0000 Subject: [PATCH 048/109] fix(ci): require loadgen in deployment integration test --- .github/workflows/deployment-test.yml | 1 + .../deployment/scripts/integration-test.sh | 22 +++++++++++++------ scripts/run-deployment-integration.sh | 1 + 3 files changed, 17 insertions(+), 7 deletions(-) diff --git a/.github/workflows/deployment-test.yml b/.github/workflows/deployment-test.yml index 1675e48cb50..2ee7a5d795c 100644 --- a/.github/workflows/deployment-test.yml +++ b/.github/workflows/deployment-test.yml @@ -79,6 +79,7 @@ jobs: run: | set -xe DOCKER_VOLUMES="$PWD:/usr/src/agoric-sdk" 
\ + LOADGEN=1 \ packages/deployment/scripts/integration-test.sh timeout-minutes: 90 env: diff --git a/packages/deployment/scripts/integration-test.sh b/packages/deployment/scripts/integration-test.sh index 42554f4c177..65a3809b391 100755 --- a/packages/deployment/scripts/integration-test.sh +++ b/packages/deployment/scripts/integration-test.sh @@ -9,16 +9,24 @@ export NETWORK_NAME=${NETWORK_NAME-localtest} SDK_SRC=${SDK_SRC-$(cd "$thisdir/../../.." > /dev/null && pwd -P)} -DEFAULT_LOADGEN=/usr/src/testnet-load-generator LOADGEN=${LOADGEN-""} -if [ -n "$LOADGEN" ]; then +if [ -z "$LOADGEN" ] || [ "x$LOADGEN" = "x1" ]; then + for dir in "$SDK_SRC/../testnet-load-generator" /usr/src/testnet-load-generator; do + if [ -d "$dir" ]; then + LOADGEN="$dir" + break + fi + done +fi + +if [ -d "$LOADGEN" ]; then + # Get the absolute path. LOADGEN=$(cd "$LOADGEN" > /dev/null && pwd -P) -elif [ -d "$SDK_SRC/../testnet-load-generator" ]; then - LOADGEN=$(cd "$SDK_SRC/../testnet-load-generator" > /dev/null && pwd -P) -elif [ -d "$DEFAULT_LOADGEN" ]; then - LOADGEN=$(cd "$DEFAULT_LOADGEN" > /dev/null && pwd -P) +elif [ -n "$LOADGEN" ]; then + echo "Cannot find loadgen (\$LOADGEN=$LOADGEN)" >&2 + exit 2 else - LOADGEN= + echo "Running chain without loadgen" >&2 fi SOLO_ADDR= diff --git a/scripts/run-deployment-integration.sh b/scripts/run-deployment-integration.sh index 18ced9839ac..251ca22459e 100644 --- a/scripts/run-deployment-integration.sh +++ b/scripts/run-deployment-integration.sh @@ -28,6 +28,7 @@ yarn install && XSNAP_RANDOM_INIT=1 yarn build && make -C packages/cosmic-swings # change to "false" to skip extraction on success like in CI testfailure="unknown" DOCKER_VOLUMES="$AGORIC_SDK_PATH:/usr/src/agoric-sdk" \ +LOADGEN=1 \ packages/deployment/scripts/integration-test.sh || { echo "Test failed!!!" 
testfailure="true" From c946d5866ef956c198d7ea14936eb9904aa272ae Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 14 Jul 2023 19:27:21 +0000 Subject: [PATCH 049/109] fix(x/swingset): enforce snapshot restore before init --- golang/cosmos/app/app.go | 14 +++- .../cosmos/x/swingset/keeper/snapshotter.go | 69 +++++++++---------- .../x/swingset/keeper/snapshotter_test.go | 8 +-- 3 files changed, 49 insertions(+), 42 deletions(-) diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index 5951dad0a6f..318d3a4add0 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -459,8 +459,18 @@ func NewAgoricApp( app.SwingSetSnapshotter = swingsetkeeper.NewSwingsetSnapshotter( bApp, - app.SwingSetKeeper, - sendToController, + app.SwingSetKeeper.ExportSwingStore, + func(action vm.Jsonable, mustNotBeInited bool) (string, error) { + if mustNotBeInited { + app.CheckControllerInited(false) + } + + bz, err := json.Marshal(action) + if err != nil { + return "", err + } + return sendToController(true, string(bz)) + }, ) app.VibcKeeper = vibc.NewKeeper( diff --git a/golang/cosmos/x/swingset/keeper/snapshotter.go b/golang/cosmos/x/swingset/keeper/snapshotter.go index 2cba32bcf6f..da402b89b80 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter.go @@ -62,17 +62,13 @@ type exportManifest struct { Artifacts [][2]string `json:"artifacts"` } -type SwingStoreExporter interface { - ExportSwingStore(ctx sdk.Context) []*vstoragetypes.DataEntry -} - type SwingsetSnapshotter struct { - isConfigured func() bool - takeSnapshot func(height int64) - newRestoreContext func(height int64) sdk.Context - logger log.Logger - exporter SwingStoreExporter - blockingSend func(action vm.Jsonable) (string, error) + isConfigured func() bool + takeSnapshot func(height int64) + newRestoreContext func(height int64) sdk.Context + logger log.Logger + getSwingStoreExportData func(ctx sdk.Context) []*vstoragetypes.DataEntry + 
blockingSend func(action vm.Jsonable, mustNotBeInited bool) (string, error) // Only modified by the main goroutine. activeSnapshot *activeSnapshot } @@ -84,33 +80,34 @@ type snapshotAction struct { Args []json.RawMessage `json:"args,omitempty"` } -func NewSwingsetSnapshotter(app *baseapp.BaseApp, exporter SwingStoreExporter, sendToController func(bool, string) (string, error)) SwingsetSnapshotter { - // The sendToController performed by this submodule are non-deterministic. - // This submodule will send messages to JS from goroutines at unpredictable - // times, but this is safe because when handling the messages, the JS side - // does not perform operations affecting consensus and ignores state changes - // since committing the previous block. - // Since this submodule implements block level commit synchronization, the - // processing and results are both insensitive to sub-block timing of messages. - - blockingSend := func(action vm.Jsonable) (string, error) { - bz, err := json.Marshal(action) - if err != nil { - return "", err - } - return sendToController(true, string(bz)) - } - +// NewSwingsetSnapshotter creates a SwingsetSnapshotter which exclusively +// manages communication with the JS side for Swingset snapshots, ensuring +// insensitivity to sub-block timing, and enforcing concurrency requirements. +// The caller of this submodule must arrange block level commit synchronization, +// to ensure the results are deterministic. +// +// Some `blockingSend` calls performed by this submodule are non-deterministic. +// This submodule will send messages to JS from goroutines at unpredictable +// times, but this is safe because when handling the messages, the JS side +// does not perform operations affecting consensus and ignores state changes +// since committing the previous block. +// Some other `blockingSend` calls however do change the JS swing-store and +// must happen before the Swingset controller on the JS side was inited. 
+func NewSwingsetSnapshotter( + app *baseapp.BaseApp, + getSwingStoreExportData func(ctx sdk.Context) []*vstoragetypes.DataEntry, + blockingSend func(action vm.Jsonable, mustNotBeInited bool) (string, error), +) SwingsetSnapshotter { return SwingsetSnapshotter{ isConfigured: func() bool { return app.SnapshotManager() != nil }, takeSnapshot: app.Snapshot, newRestoreContext: func(height int64) sdk.Context { return app.NewUncachedContext(false, tmproto.Header{Height: height}) }, - logger: app.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName), "submodule", "snapshotter"), - exporter: exporter, - blockingSend: blockingSend, - activeSnapshot: nil, + logger: app.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName), "submodule", "snapshotter"), + getSwingStoreExportData: getSwingStoreExportData, + blockingSend: blockingSend, + activeSnapshot: nil, } } @@ -157,7 +154,7 @@ func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { } // blockingSend for COSMOS_SNAPSHOT action is safe to call from a goroutine - _, err := snapshotter.blockingSend(action) + _, err := snapshotter.blockingSend(action, false) if err != nil { // First indicate a snapshot is no longer in progress if the call to @@ -188,7 +185,7 @@ func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { BlockHeight: height, Request: "discard", } - _, err = snapshotter.blockingSend(action) + _, err = snapshotter.blockingSend(action, false) if err != nil { logger.Error("failed to discard swingset snapshot", "err", err) @@ -286,7 +283,7 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(height uint64, payload BlockHeight: activeSnapshot.height, Request: "retrieve", } - out, err := snapshotter.blockingSend(action) + out, err := snapshotter.blockingSend(action, false) if err != nil { return err @@ -392,7 +389,7 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(height uint64, format u // At this point the content of the cosmos DB has been 
verified against the // AppHash, which means the SwingStore data it contains can be used as the // trusted root against which to validate the artifacts. - swingStoreEntries := snapshotter.exporter.ExportSwingStore(ctx) + swingStoreEntries := snapshotter.getSwingStoreExportData(ctx) if len(swingStoreEntries) > 0 { encoder := json.NewEncoder(exportDataFile) @@ -480,7 +477,7 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(height uint64, format u Args: []json.RawMessage{encodedExportDir}, } - _, err = snapshotter.blockingSend(action) + _, err = snapshotter.blockingSend(action, true) if err != nil { return err } diff --git a/golang/cosmos/x/swingset/keeper/snapshotter_test.go b/golang/cosmos/x/swingset/keeper/snapshotter_test.go index e1b6f36dc94..0c1474879e3 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter_test.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter_test.go @@ -16,7 +16,7 @@ func newTestSnapshotter() SwingsetSnapshotter { takeSnapshot: func(height int64) {}, newRestoreContext: func(height int64) sdk.Context { return sdk.Context{} }, logger: logger, - blockingSend: func(action vm.Jsonable) (string, error) { return "", nil }, + blockingSend: func(action vm.Jsonable, mustNotBeInited bool) (string, error) { return "", nil }, } } @@ -89,7 +89,7 @@ func TestSecondCommit(t *testing.T) { func TestInitiateFails(t *testing.T) { swingsetSnapshotter := newTestSnapshotter() - swingsetSnapshotter.blockingSend = func(action vm.Jsonable) (string, error) { + swingsetSnapshotter.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) (string, error) { if action.(*snapshotAction).Request == "initiate" { return "", errors.New("initiate failed") } @@ -116,7 +116,7 @@ func TestInitiateFails(t *testing.T) { func TestRetrievalFails(t *testing.T) { swingsetSnapshotter := newTestSnapshotter() - swingsetSnapshotter.blockingSend = func(action vm.Jsonable) (string, error) { + swingsetSnapshotter.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) 
(string, error) { if action.(*snapshotAction).Request == "retrieve" { return "", errors.New("retrieve failed") } @@ -152,7 +152,7 @@ func TestRetrievalFails(t *testing.T) { func TestDiscard(t *testing.T) { discardCalled := false swingsetSnapshotter := newTestSnapshotter() - swingsetSnapshotter.blockingSend = func(action vm.Jsonable) (string, error) { + swingsetSnapshotter.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) (string, error) { if action.(*snapshotAction).Request == "discard" { discardCalled = true } From e451c2eb749200f7528e65e3a4fc17f170b05360 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Wed, 12 Jul 2023 14:18:42 +0000 Subject: [PATCH 050/109] fix(scripts): remove new home folder before local deployment test --- scripts/run-deployment-integration.sh | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/scripts/run-deployment-integration.sh b/scripts/run-deployment-integration.sh index 66a8c9ecb70..18ced9839ac 100644 --- a/scripts/run-deployment-integration.sh +++ b/scripts/run-deployment-integration.sh @@ -12,7 +12,15 @@ export AGORIC_SDK_PATH="${AGORIC_SDK_PATH-$SDK_SRC}" export NETWORK_NAME=chaintest sudo ln -sf "$SDK_SRC/packages/deployment/bin/ag-setup-cosmos" /usr/local/bin/ag-setup-cosmos -rm -rf "$SDK_SRC/chaintest" ~/.ag-chain-cosmos/ /usr/src/testnet-load-generator/_agstate/agoric-servers/testnet-8000 + +# Note: the deployment test and the loadgen test in testnet mode modify some +# directories in $HOME so provide an empty $HOME for them. +export HOME="$(mktemp -d -t deployment-integration-home.XXXXX)" + +# While it'd be great if these [tests were more hermetic](https://github.com/Agoric/agoric-sdk/issues/8059), +# this manual runner must currently reset paths relative to the SDK to ensure +# reproducible tests. 
+rm -rf "$SDK_SRC/chaintest" "$SDK_SRC/../testnet-load-generator/_agstate/agoric-servers/testnet-8000" cd "$SDK_SRC" sudo ./packages/deployment/scripts/install-deps.sh From 50944cfbef164cdca2bffb4ca2980aa617a851ce Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 13 Jul 2023 15:32:37 +0000 Subject: [PATCH 051/109] refactor(cosmos): rename snapshot artifact protobuf --- .../proto/agoric/swingset/swingset.proto | 7 +- .../cosmos/x/swingset/keeper/snapshotter.go | 24 +-- golang/cosmos/x/swingset/types/swingset.pb.go | 162 +++++++++--------- 3 files changed, 99 insertions(+), 94 deletions(-) diff --git a/golang/cosmos/proto/agoric/swingset/swingset.proto b/golang/cosmos/proto/agoric/swingset/swingset.proto index 991738b9569..c1a238edfea 100644 --- a/golang/cosmos/proto/agoric/swingset/swingset.proto +++ b/golang/cosmos/proto/agoric/swingset/swingset.proto @@ -150,8 +150,11 @@ message Egress { ]; } -// The payload messages used by swingset state-sync -message ExtensionSnapshotterArtifactPayload { +// SwingStoreArtifact encodes an artifact of a swing-store export. +// Artifacts may be stored or transmitted in any order. Most handlers do +// maintain the artifact order from their original source as an effect of how +// they handle the artifacts. 
+message SwingStoreArtifact { option (gogoproto.equal) = false; string name = 1 [ (gogoproto.jsontag) = "name", diff --git a/golang/cosmos/x/swingset/keeper/snapshotter.go b/golang/cosmos/x/swingset/keeper/snapshotter.go index d0d4b738c64..286253e3b30 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter.go @@ -329,14 +329,14 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(height uint64, payload } writeFileToPayload := func(fileName string, artifactName string) error { - payload := types.ExtensionSnapshotterArtifactPayload{Name: artifactName} + artifact := types.SwingStoreArtifact{Name: artifactName} - payload.Data, err = os.ReadFile(filepath.Join(exportDir, fileName)) + artifact.Data, err = os.ReadFile(filepath.Join(exportDir, fileName)) if err != nil { return err } - payloadBytes, err := payload.Marshal() + payloadBytes, err := artifact.Marshal() if err != nil { return err } @@ -455,34 +455,34 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(height uint64, format u return err } - payload := types.ExtensionSnapshotterArtifactPayload{} - if err = payload.Unmarshal(payloadBytes); err != nil { + artifact := types.SwingStoreArtifact{} + if err = artifact.Unmarshal(payloadBytes); err != nil { return err } switch { - case payload.Name != UntrustedExportDataArtifactName: + case artifact.Name != UntrustedExportDataArtifactName: // Artifact verifiable on import from the export data - // Since we cannot trust the state-sync payload at this point, we generate + // Since we cannot trust the state-sync artifact at this point, we generate // a safe and unique filename from the artifact name we received, by // substituting any non letters-digits-hyphen-underscore-dot by a hyphen, // and prefixing with an incremented id. // The filename is not used for any purpose in the snapshotting logic. 
- filename := sanitizeArtifactName(payload.Name) + filename := sanitizeArtifactName(artifact.Name) filename = fmt.Sprintf("%d-%s", len(manifest.Artifacts), filename) - manifest.Artifacts = append(manifest.Artifacts, [2]string{payload.Name, filename}) - err = writeExportFile(filename, payload.Data) + manifest.Artifacts = append(manifest.Artifacts, [2]string{artifact.Name, filename}) + err = writeExportFile(filename, artifact.Data) case len(swingStoreEntries) > 0: // Pseudo artifact containing untrusted export data which may have been // saved separately for debugging purposes (not referenced from the manifest) - err = writeExportFile(UntrustedExportDataFilename, payload.Data) + err = writeExportFile(UntrustedExportDataFilename, artifact.Data) default: // There is no trusted export data err = errors.New("cannot restore from untrusted export data") // snapshotter.logger.Info("using untrusted export data for swingstore restore") - // _, err = exportDataFile.Write(payload.Data) + // _, err = exportDataFile.Write(artifact.Data) } if err != nil { diff --git a/golang/cosmos/x/swingset/types/swingset.pb.go b/golang/cosmos/x/swingset/types/swingset.pb.go index 940e367d844..061101daa2c 100644 --- a/golang/cosmos/x/swingset/types/swingset.pb.go +++ b/golang/cosmos/x/swingset/types/swingset.pb.go @@ -495,24 +495,27 @@ func (m *Egress) GetPowerFlags() []string { return nil } -// The payload messages used by swingset state-sync -type ExtensionSnapshotterArtifactPayload struct { +// SwingStoreArtifact encodes an artifact of a swing-store export. +// Artifacts may be stored or transmitted in any order. Most handlers do +// maintain the artifact order from their original source as an effect of how +// they handle the artifacts. 
+type SwingStoreArtifact struct { Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name" yaml:"name"` Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data" yaml:"data"` } -func (m *ExtensionSnapshotterArtifactPayload) Reset() { *m = ExtensionSnapshotterArtifactPayload{} } -func (m *ExtensionSnapshotterArtifactPayload) String() string { return proto.CompactTextString(m) } -func (*ExtensionSnapshotterArtifactPayload) ProtoMessage() {} -func (*ExtensionSnapshotterArtifactPayload) Descriptor() ([]byte, []int) { +func (m *SwingStoreArtifact) Reset() { *m = SwingStoreArtifact{} } +func (m *SwingStoreArtifact) String() string { return proto.CompactTextString(m) } +func (*SwingStoreArtifact) ProtoMessage() {} +func (*SwingStoreArtifact) Descriptor() ([]byte, []int) { return fileDescriptor_ff9c341e0de15f8b, []int{8} } -func (m *ExtensionSnapshotterArtifactPayload) XXX_Unmarshal(b []byte) error { +func (m *SwingStoreArtifact) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } -func (m *ExtensionSnapshotterArtifactPayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { +func (m *SwingStoreArtifact) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { - return xxx_messageInfo_ExtensionSnapshotterArtifactPayload.Marshal(b, m, deterministic) + return xxx_messageInfo_SwingStoreArtifact.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) @@ -522,26 +525,26 @@ func (m *ExtensionSnapshotterArtifactPayload) XXX_Marshal(b []byte, deterministi return b[:n], nil } } -func (m *ExtensionSnapshotterArtifactPayload) XXX_Merge(src proto.Message) { - xxx_messageInfo_ExtensionSnapshotterArtifactPayload.Merge(m, src) +func (m *SwingStoreArtifact) XXX_Merge(src proto.Message) { + xxx_messageInfo_SwingStoreArtifact.Merge(m, src) } -func (m *ExtensionSnapshotterArtifactPayload) XXX_Size() int { +func (m *SwingStoreArtifact) XXX_Size() int { return m.Size() } -func (m 
*ExtensionSnapshotterArtifactPayload) XXX_DiscardUnknown() { - xxx_messageInfo_ExtensionSnapshotterArtifactPayload.DiscardUnknown(m) +func (m *SwingStoreArtifact) XXX_DiscardUnknown() { + xxx_messageInfo_SwingStoreArtifact.DiscardUnknown(m) } -var xxx_messageInfo_ExtensionSnapshotterArtifactPayload proto.InternalMessageInfo +var xxx_messageInfo_SwingStoreArtifact proto.InternalMessageInfo -func (m *ExtensionSnapshotterArtifactPayload) GetName() string { +func (m *SwingStoreArtifact) GetName() string { if m != nil { return m.Name } return "" } -func (m *ExtensionSnapshotterArtifactPayload) GetData() []byte { +func (m *SwingStoreArtifact) GetData() []byte { if m != nil { return m.Data } @@ -557,67 +560,66 @@ func init() { proto.RegisterType((*PowerFlagFee)(nil), "agoric.swingset.PowerFlagFee") proto.RegisterType((*QueueSize)(nil), "agoric.swingset.QueueSize") proto.RegisterType((*Egress)(nil), "agoric.swingset.Egress") - proto.RegisterType((*ExtensionSnapshotterArtifactPayload)(nil), "agoric.swingset.ExtensionSnapshotterArtifactPayload") + proto.RegisterType((*SwingStoreArtifact)(nil), "agoric.swingset.SwingStoreArtifact") } func init() { proto.RegisterFile("agoric/swingset/swingset.proto", fileDescriptor_ff9c341e0de15f8b) } var fileDescriptor_ff9c341e0de15f8b = []byte{ - // 858 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0xbd, 0x6f, 0x23, 0x45, - 0x14, 0xf7, 0x62, 0x3b, 0xc4, 0xcf, 0xbe, 0xe4, 0x18, 0x22, 0x9d, 0x89, 0x38, 0x4f, 0xb4, 0x14, - 0x44, 0x3a, 0x9d, 0x7d, 0x01, 0x21, 0x24, 0x9f, 0x28, 0xbc, 0x91, 0x4f, 0x27, 0x21, 0x90, 0xd9, - 0x28, 0x14, 0x08, 0xb4, 0x1a, 0xaf, 0xc7, 0x7b, 0x93, 0xac, 0x67, 0xf6, 0x66, 0x26, 0x5f, 0xd7, - 0x23, 0x68, 0x90, 0x10, 0x15, 0x65, 0x6a, 0xfe, 0x92, 0x2b, 0xaf, 0x44, 0x14, 0x0b, 0x4a, 0x1a, - 0x94, 0xd2, 0x25, 0x12, 0x12, 0x9a, 0x99, 0xf5, 0xc6, 0x22, 0x48, 0xa4, 0xb9, 0x6a, 0xdf, 0xe7, - 0xef, 0xbd, 0xf7, 0x7b, 0x33, 0x3b, 0xd0, 0x21, 0x89, 0x90, 0x2c, 0xee, 
0xa9, 0x13, 0xc6, 0x13, - 0x45, 0x75, 0x29, 0x74, 0x33, 0x29, 0xb4, 0x40, 0xeb, 0xce, 0xdf, 0x5d, 0x98, 0x37, 0x37, 0x12, - 0x91, 0x08, 0xeb, 0xeb, 0x19, 0xc9, 0x85, 0x6d, 0x76, 0x62, 0xa1, 0x66, 0x42, 0xf5, 0xc6, 0x44, - 0xd1, 0xde, 0xf1, 0xce, 0x98, 0x6a, 0xb2, 0xd3, 0x8b, 0x05, 0xe3, 0xce, 0xef, 0x7f, 0xe7, 0xc1, - 0xdd, 0x5d, 0x21, 0xe9, 0xf0, 0x98, 0xa4, 0x23, 0x29, 0x32, 0xa1, 0x48, 0x8a, 0x36, 0xa0, 0xae, - 0x99, 0x4e, 0x69, 0xdb, 0xdb, 0xf2, 0xb6, 0x1b, 0xa1, 0x53, 0xd0, 0x16, 0x34, 0x27, 0x54, 0xc5, - 0x92, 0x65, 0x9a, 0x09, 0xde, 0x7e, 0xc3, 0xfa, 0x96, 0x4d, 0xe8, 0x23, 0xa8, 0xd3, 0x63, 0x92, - 0xaa, 0x76, 0x75, 0xab, 0xba, 0xdd, 0xfc, 0xe0, 0x9d, 0xee, 0xbf, 0x7a, 0xec, 0x2e, 0x2a, 0x05, - 0xb5, 0x97, 0x39, 0xae, 0x84, 0x2e, 0xba, 0x5f, 0xfb, 0xfe, 0x1c, 0x57, 0x7c, 0x05, 0xab, 0x0b, - 0x37, 0xea, 0x43, 0xeb, 0x40, 0x09, 0x1e, 0x65, 0x54, 0xce, 0x98, 0x56, 0xae, 0x8f, 0xe0, 0xde, - 0x3c, 0xc7, 0x6f, 0x9f, 0x91, 0x59, 0xda, 0xf7, 0x97, 0xbd, 0x7e, 0xd8, 0x34, 0xea, 0xc8, 0x69, - 0xe8, 0x01, 0xbc, 0x79, 0xa0, 0xa2, 0x58, 0x4c, 0xa8, 0x6b, 0x31, 0x40, 0xf3, 0x1c, 0xaf, 0x2d, - 0xd2, 0xac, 0xc3, 0x0f, 0x57, 0x0e, 0xd4, 0xae, 0x11, 0x7e, 0xa8, 0xc2, 0xca, 0x88, 0x48, 0x32, - 0x53, 0xe8, 0x29, 0xac, 0x8d, 0x29, 0xe1, 0xca, 0xc0, 0x46, 0x47, 0x9c, 0xe9, 0xb6, 0x67, 0xa7, - 0x78, 0xf7, 0xc6, 0x14, 0x7b, 0x5a, 0x32, 0x9e, 0x04, 0x26, 0xb8, 0x18, 0xa4, 0x65, 0x33, 0x47, - 0x54, 0xee, 0x73, 0xa6, 0xd1, 0x73, 0x58, 0x9b, 0x52, 0x6a, 0x31, 0xa2, 0x4c, 0xb2, 0xd8, 0x34, - 0xe2, 0xf8, 0x70, 0xcb, 0xe8, 0x9a, 0x65, 0x74, 0x8b, 0x65, 0x74, 0x77, 0x05, 0xe3, 0xc1, 0x23, - 0x03, 0xf3, 0xcb, 0xef, 0x78, 0x3b, 0x61, 0xfa, 0xd9, 0xd1, 0xb8, 0x1b, 0x8b, 0x59, 0xaf, 0xd8, - 0x9c, 0xfb, 0x3c, 0x54, 0x93, 0xc3, 0x9e, 0x3e, 0xcb, 0xa8, 0xb2, 0x09, 0x2a, 0x6c, 0x4d, 0x29, - 0x35, 0xd5, 0x46, 0xa6, 0x00, 0x7a, 0x04, 0x1b, 0x63, 0x21, 0xb4, 0xd2, 0x92, 0x64, 0xd1, 0x31, - 0xd1, 0x51, 0x2c, 0xf8, 0x94, 0x25, 0xed, 0xaa, 0x5d, 0x12, 0x2a, 0x7d, 0x5f, 0x12, 0xbd, 0x6b, - 0x3d, 0xe8, 
0x53, 0x58, 0xcf, 0xc4, 0x09, 0x95, 0xd1, 0x34, 0x25, 0x49, 0x34, 0xa5, 0x54, 0xb5, - 0x6b, 0xb6, 0xcb, 0xfb, 0x37, 0xe6, 0x1d, 0x99, 0xb8, 0x27, 0x29, 0x49, 0x9e, 0x50, 0x5a, 0x0c, - 0x7c, 0x27, 0x5b, 0xb2, 0x29, 0xf4, 0x09, 0x34, 0x9e, 0x1f, 0xd1, 0x23, 0x1a, 0xcd, 0xc8, 0x69, - 0xbb, 0x6e, 0x61, 0x36, 0x6f, 0xc0, 0x7c, 0x61, 0x22, 0xf6, 0xd8, 0x8b, 0x05, 0xc6, 0xaa, 0x4d, - 0xf9, 0x8c, 0x9c, 0xf6, 0x57, 0x7f, 0x3e, 0xc7, 0x95, 0x3f, 0xcf, 0xb1, 0xe7, 0x7f, 0x0e, 0xf5, - 0x3d, 0x4d, 0x34, 0x45, 0x43, 0xb8, 0xe3, 0x10, 0x49, 0x9a, 0x8a, 0x13, 0x3a, 0x29, 0x96, 0xf1, - 0xff, 0xa8, 0x2d, 0x9b, 0x36, 0x70, 0x59, 0x7e, 0x0a, 0xcd, 0xa5, 0x6d, 0xa1, 0xbb, 0x50, 0x3d, - 0xa4, 0x67, 0xc5, 0xb1, 0x36, 0x22, 0x1a, 0x42, 0xdd, 0xee, 0xae, 0x38, 0x2b, 0x3d, 0x83, 0xf1, - 0x5b, 0x8e, 0xdf, 0xbf, 0xc5, 0x1e, 0xf6, 0x19, 0xd7, 0xa1, 0xcb, 0xee, 0xd7, 0x6c, 0xf7, 0x3f, - 0x79, 0xd0, 0x5a, 0x26, 0x0b, 0xdd, 0x07, 0xb8, 0x26, 0xb9, 0x28, 0xdb, 0x28, 0xa9, 0x43, 0xdf, - 0x40, 0x75, 0x4a, 0x5f, 0xcb, 0xe9, 0x30, 0xb8, 0x45, 0x53, 0x1f, 0x43, 0xa3, 0xe4, 0xe8, 0x3f, - 0x08, 0x40, 0x50, 0x53, 0xec, 0x85, 0xbb, 0x2b, 0xf5, 0xd0, 0xca, 0x45, 0xe2, 0xdf, 0x1e, 0xac, - 0x0c, 0x13, 0x49, 0x95, 0x42, 0x8f, 0x61, 0x95, 0xb3, 0xf8, 0x90, 0x93, 0x59, 0xf1, 0x4f, 0x08, - 0xf0, 0x55, 0x8e, 0x4b, 0xdb, 0x3c, 0xc7, 0xeb, 0xee, 0x82, 0x2d, 0x2c, 0x7e, 0x58, 0x3a, 0xd1, - 0xd7, 0x50, 0xcb, 0x28, 0x95, 0xb6, 0x42, 0x2b, 0x78, 0x7a, 0x95, 0x63, 0xab, 0xcf, 0x73, 0xdc, - 0x74, 0x49, 0x46, 0xf3, 0xff, 0xca, 0xf1, 0xc3, 0x5b, 0x8c, 0x37, 0x88, 0xe3, 0xc1, 0x64, 0x62, - 0x9a, 0x0a, 0x2d, 0x0a, 0x0a, 0xa1, 0x79, 0x4d, 0xb1, 0xfb, 0xf3, 0x34, 0x82, 0x9d, 0x8b, 0x1c, - 0x43, 0xb9, 0x09, 0x75, 0x95, 0x63, 0x28, 0x59, 0x57, 0xf3, 0x1c, 0xbf, 0x55, 0x14, 0x2e, 0x6d, - 0x7e, 0xb8, 0x14, 0x60, 0xe7, 0xaf, 0xf8, 0xdf, 0x7a, 0xf0, 0xde, 0xf0, 0x54, 0x53, 0xae, 0x98, - 0xe0, 0x7b, 0x9c, 0x64, 0xea, 0x99, 0xd0, 0x9a, 0xca, 0x81, 0xd4, 0x6c, 0x4a, 0x62, 0x3d, 0x22, - 0x67, 0xa9, 0x20, 0x13, 0xf4, 0x00, 0x6a, 0x4b, 
0xc4, 0xdc, 0x33, 0xf3, 0x15, 0xa4, 0x14, 0xf3, - 0x39, 0x42, 0xac, 0xd1, 0x04, 0x4f, 0x88, 0x26, 0x05, 0x19, 0x36, 0xd8, 0xe8, 0xd7, 0xc1, 0x46, - 0xf3, 0x43, 0x6b, 0x74, 0x7d, 0x04, 0xfb, 0x2f, 0x2f, 0x3a, 0xde, 0xab, 0x8b, 0x8e, 0xf7, 0xc7, - 0x45, 0xc7, 0xfb, 0xf1, 0xb2, 0x53, 0x79, 0x75, 0xd9, 0xa9, 0xfc, 0x7a, 0xd9, 0xa9, 0x7c, 0xf5, - 0x78, 0x89, 0xb0, 0x81, 0x7b, 0x2e, 0xdc, 0xf5, 0xb0, 0x84, 0x25, 0x22, 0x25, 0x3c, 0x59, 0x30, - 0x79, 0x7a, 0xfd, 0x92, 0x58, 0x26, 0xc7, 0x2b, 0xf6, 0x01, 0xf8, 0xf0, 0x9f, 0x00, 0x00, 0x00, - 0xff, 0xff, 0xd2, 0xc0, 0xc3, 0x71, 0x69, 0x06, 0x00, 0x00, + // 842 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0xcf, 0x6f, 0xe3, 0x44, + 0x14, 0x8e, 0x49, 0x52, 0x9a, 0x97, 0x6c, 0xbb, 0x0c, 0x95, 0x36, 0x54, 0x6c, 0xa6, 0xf2, 0x85, + 0x4a, 0xab, 0x8d, 0xb7, 0x20, 0x84, 0x94, 0x15, 0x87, 0xb8, 0xea, 0x6a, 0x25, 0x04, 0x0a, 0x8e, + 0xca, 0x01, 0x81, 0xac, 0x89, 0x33, 0x31, 0xd3, 0x3a, 0x1e, 0xef, 0xcc, 0xf4, 0xd7, 0xfe, 0x03, + 0x70, 0x41, 0x42, 0x9c, 0x38, 0xf6, 0xcc, 0x5f, 0xb2, 0xc7, 0x3d, 0x22, 0x0e, 0x06, 0xb5, 0x17, + 0xd4, 0x63, 0x8e, 0x48, 0x48, 0x68, 0x66, 0x1c, 0xc7, 0xa2, 0x48, 0xf4, 0xc2, 0x29, 0xf3, 0x7e, + 0x7d, 0xef, 0x7d, 0xdf, 0x1b, 0x4f, 0xa0, 0x47, 0x62, 0x2e, 0x58, 0xe4, 0xc9, 0x33, 0x96, 0xc6, + 0x92, 0xaa, 0xf2, 0xd0, 0xcf, 0x04, 0x57, 0x1c, 0x6d, 0xda, 0x78, 0x7f, 0xe9, 0xde, 0xde, 0x8a, + 0x79, 0xcc, 0x4d, 0xcc, 0xd3, 0x27, 0x9b, 0xb6, 0xdd, 0x8b, 0xb8, 0x9c, 0x73, 0xe9, 0x4d, 0x88, + 0xa4, 0xde, 0xe9, 0xde, 0x84, 0x2a, 0xb2, 0xe7, 0x45, 0x9c, 0xa5, 0x36, 0xee, 0x7e, 0xeb, 0xc0, + 0xfd, 0x7d, 0x2e, 0xe8, 0xc1, 0x29, 0x49, 0x46, 0x82, 0x67, 0x5c, 0x92, 0x04, 0x6d, 0x41, 0x53, + 0x31, 0x95, 0xd0, 0xae, 0xb3, 0xe3, 0xec, 0xb6, 0x02, 0x6b, 0xa0, 0x1d, 0x68, 0x4f, 0xa9, 0x8c, + 0x04, 0xcb, 0x14, 0xe3, 0x69, 0xf7, 0x0d, 0x13, 0xab, 0xba, 0xd0, 0x87, 0xd0, 0xa4, 0xa7, 0x24, + 0x91, 0xdd, 0xfa, 0x4e, 0x7d, 0xb7, 0xfd, 0xfe, 0x3b, 0xfd, 0x7f, 0xcc, 
0xd8, 0x5f, 0x76, 0xf2, + 0x1b, 0xaf, 0x72, 0x5c, 0x0b, 0x6c, 0xf6, 0xa0, 0xf1, 0xdd, 0x25, 0xae, 0xb9, 0x12, 0xd6, 0x97, + 0x61, 0x34, 0x80, 0xce, 0x91, 0xe4, 0x69, 0x98, 0x51, 0x31, 0x67, 0x4a, 0xda, 0x39, 0xfc, 0x07, + 0x8b, 0x1c, 0xbf, 0x7d, 0x41, 0xe6, 0xc9, 0xc0, 0xad, 0x46, 0xdd, 0xa0, 0xad, 0xcd, 0x91, 0xb5, + 0xd0, 0x23, 0x78, 0xf3, 0x48, 0x86, 0x11, 0x9f, 0x52, 0x3b, 0xa2, 0x8f, 0x16, 0x39, 0xde, 0x58, + 0x96, 0x99, 0x80, 0x1b, 0xac, 0x1d, 0xc9, 0x7d, 0x7d, 0xf8, 0xbe, 0x0e, 0x6b, 0x23, 0x22, 0xc8, + 0x5c, 0xa2, 0xe7, 0xb0, 0x31, 0xa1, 0x24, 0x95, 0x1a, 0x36, 0x3c, 0x49, 0x99, 0xea, 0x3a, 0x86, + 0xc5, 0xbb, 0xb7, 0x58, 0x8c, 0x95, 0x60, 0x69, 0xec, 0xeb, 0xe4, 0x82, 0x48, 0xc7, 0x54, 0x8e, + 0xa8, 0x38, 0x4c, 0x99, 0x42, 0x2f, 0x60, 0x63, 0x46, 0xa9, 0xc1, 0x08, 0x33, 0xc1, 0x22, 0x3d, + 0x88, 0xd5, 0xc3, 0x2e, 0xa3, 0xaf, 0x97, 0xd1, 0x2f, 0x96, 0xd1, 0xdf, 0xe7, 0x2c, 0xf5, 0x9f, + 0x68, 0x98, 0x9f, 0x7f, 0xc3, 0xbb, 0x31, 0x53, 0xdf, 0x9c, 0x4c, 0xfa, 0x11, 0x9f, 0x7b, 0xc5, + 0xe6, 0xec, 0xcf, 0x63, 0x39, 0x3d, 0xf6, 0xd4, 0x45, 0x46, 0xa5, 0x29, 0x90, 0x41, 0x67, 0x46, + 0xa9, 0xee, 0x36, 0xd2, 0x0d, 0xd0, 0x13, 0xd8, 0x9a, 0x70, 0xae, 0xa4, 0x12, 0x24, 0x0b, 0x4f, + 0x89, 0x0a, 0x23, 0x9e, 0xce, 0x58, 0xdc, 0xad, 0x9b, 0x25, 0xa1, 0x32, 0xf6, 0x05, 0x51, 0xfb, + 0x26, 0x82, 0x3e, 0x81, 0xcd, 0x8c, 0x9f, 0x51, 0x11, 0xce, 0x12, 0x12, 0x87, 0x33, 0x4a, 0x65, + 0xb7, 0x61, 0xa6, 0x7c, 0x78, 0x8b, 0xef, 0x48, 0xe7, 0x3d, 0x4b, 0x48, 0xfc, 0x8c, 0xd2, 0x82, + 0xf0, 0xbd, 0xac, 0xe2, 0x93, 0xe8, 0x63, 0x68, 0xbd, 0x38, 0xa1, 0x27, 0x34, 0x9c, 0x93, 0xf3, + 0x6e, 0xd3, 0xc0, 0x6c, 0xdf, 0x82, 0xf9, 0x5c, 0x67, 0x8c, 0xd9, 0xcb, 0x25, 0xc6, 0xba, 0x29, + 0xf9, 0x94, 0x9c, 0x0f, 0xd6, 0x7f, 0xba, 0xc4, 0xb5, 0x3f, 0x2e, 0xb1, 0xe3, 0x7e, 0x06, 0xcd, + 0xb1, 0x22, 0x8a, 0xa2, 0x03, 0xb8, 0x67, 0x11, 0x49, 0x92, 0xf0, 0x33, 0x3a, 0x2d, 0x96, 0xf1, + 0xdf, 0xa8, 0x1d, 0x53, 0x36, 0xb4, 0x55, 0x6e, 0x02, 0xed, 0xca, 0xb6, 0xd0, 0x7d, 0xa8, 0x1f, + 0xd3, 0x8b, 
0xe2, 0x5a, 0xeb, 0x23, 0x3a, 0x80, 0xa6, 0xd9, 0x5d, 0x71, 0x57, 0x3c, 0x8d, 0xf1, + 0x6b, 0x8e, 0xdf, 0xbb, 0xc3, 0x1e, 0x0e, 0x59, 0xaa, 0x02, 0x5b, 0x3d, 0x68, 0x98, 0xe9, 0x7f, + 0x74, 0xa0, 0x53, 0x15, 0x0b, 0x3d, 0x04, 0x58, 0x89, 0x5c, 0xb4, 0x6d, 0x95, 0xd2, 0xa1, 0xaf, + 0xa1, 0x3e, 0xa3, 0xff, 0xcb, 0xed, 0xd0, 0xb8, 0xc5, 0x50, 0x1f, 0x41, 0xab, 0xd4, 0xe8, 0x5f, + 0x04, 0x40, 0xd0, 0x90, 0xec, 0xa5, 0xfd, 0x56, 0x9a, 0x81, 0x39, 0x17, 0x85, 0x7f, 0x39, 0xb0, + 0x76, 0x10, 0x0b, 0x2a, 0x25, 0x7a, 0x0a, 0xeb, 0x29, 0x8b, 0x8e, 0x53, 0x32, 0x2f, 0xde, 0x04, + 0x1f, 0xdf, 0xe4, 0xb8, 0xf4, 0x2d, 0x72, 0xbc, 0x69, 0x3f, 0xb0, 0xa5, 0xc7, 0x0d, 0xca, 0x20, + 0xfa, 0x0a, 0x1a, 0x19, 0xa5, 0xc2, 0x74, 0xe8, 0xf8, 0xcf, 0x6f, 0x72, 0x6c, 0xec, 0x45, 0x8e, + 0xdb, 0xb6, 0x48, 0x5b, 0xee, 0x9f, 0x39, 0x7e, 0x7c, 0x07, 0x7a, 0xc3, 0x28, 0x1a, 0x4e, 0xa7, + 0x7a, 0xa8, 0xc0, 0xa0, 0xa0, 0x00, 0xda, 0x2b, 0x89, 0xed, 0xcb, 0xd3, 0xf2, 0xf7, 0xae, 0x72, + 0x0c, 0xe5, 0x26, 0xe4, 0x4d, 0x8e, 0xa1, 0x54, 0x5d, 0x2e, 0x72, 0xfc, 0x56, 0xd1, 0xb8, 0xf4, + 0xb9, 0x41, 0x25, 0xc1, 0xf0, 0xaf, 0xb9, 0x0a, 0xd0, 0x58, 0xdf, 0xb2, 0xb1, 0xe2, 0x82, 0x0e, + 0x85, 0x62, 0x33, 0x12, 0x29, 0xf4, 0x08, 0x1a, 0x15, 0x19, 0x1e, 0x68, 0x36, 0x85, 0x04, 0x05, + 0x1b, 0x4b, 0xdf, 0x38, 0x75, 0xf2, 0x94, 0x28, 0x52, 0x50, 0x37, 0xc9, 0xda, 0x5e, 0x25, 0x6b, + 0xcb, 0x0d, 0x8c, 0xd3, 0x76, 0xf5, 0x0f, 0x5f, 0x5d, 0xf5, 0x9c, 0xd7, 0x57, 0x3d, 0xe7, 0xf7, + 0xab, 0x9e, 0xf3, 0xc3, 0x75, 0xaf, 0xf6, 0xfa, 0xba, 0x57, 0xfb, 0xe5, 0xba, 0x57, 0xfb, 0xf2, + 0x69, 0x45, 0x9e, 0xa1, 0xfd, 0x73, 0xb0, 0x1f, 0x83, 0x91, 0x27, 0xe6, 0x09, 0x49, 0xe3, 0xa5, + 0x6e, 0xe7, 0xab, 0xff, 0x0d, 0xa3, 0xdb, 0x64, 0xcd, 0x3c, 0xf7, 0x1f, 0xfc, 0x1d, 0x00, 0x00, + 0xff, 0xff, 0xee, 0x34, 0x5f, 0xf6, 0x57, 0x06, 0x00, 0x00, } func (this *Params) Equal(that interface{}) bool { @@ -1138,7 +1140,7 @@ func (m *Egress) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } -func (m 
*ExtensionSnapshotterArtifactPayload) Marshal() (dAtA []byte, err error) { +func (m *SwingStoreArtifact) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) @@ -1148,12 +1150,12 @@ func (m *ExtensionSnapshotterArtifactPayload) Marshal() (dAtA []byte, err error) return dAtA[:n], nil } -func (m *ExtensionSnapshotterArtifactPayload) MarshalTo(dAtA []byte) (int, error) { +func (m *SwingStoreArtifact) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } -func (m *ExtensionSnapshotterArtifactPayload) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *SwingStoreArtifact) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int @@ -1351,7 +1353,7 @@ func (m *Egress) Size() (n int) { return n } -func (m *ExtensionSnapshotterArtifactPayload) Size() (n int) { +func (m *SwingStoreArtifact) Size() (n int) { if m == nil { return 0 } @@ -2419,7 +2421,7 @@ func (m *Egress) Unmarshal(dAtA []byte) error { } return nil } -func (m *ExtensionSnapshotterArtifactPayload) Unmarshal(dAtA []byte) error { +func (m *SwingStoreArtifact) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -2442,10 +2444,10 @@ func (m *ExtensionSnapshotterArtifactPayload) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: ExtensionSnapshotterArtifactPayload: wiretype end group for non-group") + return fmt.Errorf("proto: SwingStoreArtifact: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: ExtensionSnapshotterArtifactPayload: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: SwingStoreArtifact: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: From 544c72030aef29cd0c82eff40892cbe45d47a77f Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 28 Jul 2023 14:35:45 +0000 
Subject: [PATCH 052/109] refactor(cosmic-swingset): consolidate init and bootstrap --- golang/cosmos/app/app.go | 36 +++++--------------- packages/cosmic-swingset/src/chain-main.js | 6 ++-- packages/cosmic-swingset/src/launch-chain.js | 11 +++--- packages/cosmic-swingset/src/sim-chain.js | 5 +-- packages/internal/src/action-types.js | 2 +- 5 files changed, 22 insertions(+), 38 deletions(-) diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index 3fb2d0b4d41..82e28a086ad 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -828,6 +828,7 @@ func normalizeModuleAccount(ctx sdk.Context, ak authkeeper.AccountKeeper, name s type cosmosInitAction struct { Type string `json:"type"` ChainID string `json:"chainID"` + BlockTime int64 `json:"blockTime,omitempty"` IsBootstrap bool `json:"isBootstrap"` Params swingset.Params `json:"params"` SupplyCoins sdk.Coins `json:"supplyCoins"` @@ -850,11 +851,6 @@ func (app *GaiaApp) CheckControllerInited(expected bool) { } } -type bootstrapBlockAction struct { - Type string `json:"type"` - BlockTime int64 `json:"blockTime"` -} - // initController sends the initialization message to the VM. // Exits if the controller has already been initialized. // The init message will contain any upgrade plan if we're starting after an @@ -862,11 +858,17 @@ type bootstrapBlockAction struct { func (app *GaiaApp) initController(ctx sdk.Context, bootstrap bool) { app.CheckControllerInited(false) app.controllerInited = true + + var blockTime int64 = 0 + if bootstrap || app.upgradePlan != nil { + blockTime = ctx.BlockTime().Unix() + } + // Begin initializing the controller here. 
- var action vm.Jsonable - action = &cosmosInitAction{ + action := &cosmosInitAction{ Type: "AG_COSMOS_INIT", ChainID: ctx.ChainID(), + BlockTime: blockTime, IsBootstrap: bootstrap, Params: app.SwingSetKeeper.GetParams(ctx), SupplyCoins: sdk.NewCoins(app.BankKeeper.GetSupply(ctx, "uist")), @@ -892,26 +894,6 @@ func (app *GaiaApp) initController(ctx sdk.Context, bootstrap bool) { if !res { panic(fmt.Errorf("controller negative init response")) } - - if !bootstrap { - return - } - - stdlog.Println("Running SwingSet until bootstrap is ready") - // Just run the SwingSet kernel to finish bootstrap and get ready to open for - // business. - action = &bootstrapBlockAction{ - Type: "BOOTSTRAP_BLOCK", - BlockTime: ctx.BlockTime().Unix(), - } - - _, err = app.SwingSetKeeper.BlockingSend(ctx, action) - // fmt.Fprintf(os.Stderr, "BOOTSTRAP_BLOCK Returned from swingset: %s, %v\n", out, err) - if err != nil { - // NOTE: A failed BOOTSTRAP_BLOCK means that the SwingSet state is inconsistent. - // Panic here, in the hopes that a replay from scratch will fix the problem. - panic(err) - } } // ensureControllerInited inits the controller if needed. 
It's used by the diff --git a/packages/cosmic-swingset/src/chain-main.js b/packages/cosmic-swingset/src/chain-main.js index 4f13859f270..5574687fd2c 100644 --- a/packages/cosmic-swingset/src/chain-main.js +++ b/packages/cosmic-swingset/src/chain-main.js @@ -45,8 +45,6 @@ import { performStateSyncImport } from './import-kernel-db.js'; // eslint-disable-next-line no-unused-vars let whenHellFreezesOver = null; -const AG_COSMOS_INIT = 'AG_COSMOS_INIT'; - const TELEMETRY_SERVICE_NAME = 'agd-cosmos'; const toNumber = specimen => { @@ -619,7 +617,7 @@ export default async function main(progname, args, { env, homedir, agcc }) { await null; switch (action.type) { - case AG_COSMOS_INIT: { + case ActionType.AG_COSMOS_INIT: { // console.error('got AG_COSMOS_INIT', action); !blockingSend || Fail`Swingset already initialized`; @@ -644,7 +642,7 @@ export default async function main(progname, args, { env, homedir, agcc }) { // Ensure that initialization has completed. blockingSend = await launchAndInitializeSwingSet(action); - return true; + return blockingSend(action); } // Snapshot actions are specific to cosmos chains and handled here diff --git a/packages/cosmic-swingset/src/launch-chain.js b/packages/cosmic-swingset/src/launch-chain.js index 0699c39bba7..1e876dc6bc5 100644 --- a/packages/cosmic-swingset/src/launch-chain.js +++ b/packages/cosmic-swingset/src/launch-chain.js @@ -673,7 +673,7 @@ export async function launch({ // Handle block related actions // Some actions that are integration specific may be handled by the caller - // For example COSMOS_SNAPSHOT and AG_COSMOS_INIT are handled in chain-main.js + // For example COSMOS_SNAPSHOT is handled in chain-main.js async function blockingSend(action) { if (decohered) { throw decohered; @@ -688,9 +688,12 @@ export async function launch({ // action.type, // ); switch (action.type) { - case ActionType.BOOTSTRAP_BLOCK: { + case ActionType.AG_COSMOS_INIT: { + const { isBootstrap, blockTime } = action; // This only runs for the 
very first block on the chain. - const { blockTime } = action; + if (!isBootstrap) { + return true; + } verboseBlocks && blockManagerConsole.info('block bootstrap'); if (savedHeight !== 0) { throw Error(`Cannot run a bootstrap block at height ${savedHeight}`); @@ -719,7 +722,7 @@ export async function launch({ type: 'cosmic-swingset-bootstrap-block-finish', blockTime, }); - return undefined; + return true; } case ActionType.COMMIT_BLOCK: { diff --git a/packages/cosmic-swingset/src/sim-chain.js b/packages/cosmic-swingset/src/sim-chain.js index f1e5b0e8f71..685c4674b37 100644 --- a/packages/cosmic-swingset/src/sim-chain.js +++ b/packages/cosmic-swingset/src/sim-chain.js @@ -202,11 +202,12 @@ export async function connectToFakeChain(basedir, GCI, delay, inbound) { return; } // The before-first-block is special... do it now. - // This emulates what x/swingset does to run a BOOTSTRAP_BLOCK + // This emulates what x/swingset does when bootstrapping // before continuing with the real initialHeight. 
await blockingSend({ - type: 'BOOTSTRAP_BLOCK', + type: 'AG_COSMOS_INIT', blockTime: scaleBlockTime(Date.now()), + isBootstrap: true, }); blockHeight = initialHeight; }; diff --git a/packages/internal/src/action-types.js b/packages/internal/src/action-types.js index 2d5a3d77ce2..d6a4b2b5333 100644 --- a/packages/internal/src/action-types.js +++ b/packages/internal/src/action-types.js @@ -1,6 +1,6 @@ // @jessie-check -export const BOOTSTRAP_BLOCK = 'BOOTSTRAP_BLOCK'; +export const AG_COSMOS_INIT = 'AG_COSMOS_INIT'; export const COSMOS_SNAPSHOT = 'COSMOS_SNAPSHOT'; export const BEGIN_BLOCK = 'BEGIN_BLOCK'; export const CALCULATE_FEES_IN_BEANS = 'CALCULATE_FEES_IN_BEANS'; From ee0daf3235ab5c7634995b0dd0812b2a291880ba Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Sun, 23 Jul 2023 18:07:34 +0000 Subject: [PATCH 053/109] fix(deployment): propagate `ag-setup-cosmos` path to faucet script remove remnants of `ag-setup-cosmos` symlink --- packages/deployment/scripts/integration-test.sh | 2 +- scripts/run-deployment-integration.sh | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/deployment/scripts/integration-test.sh b/packages/deployment/scripts/integration-test.sh index 65a3809b391..59025808a07 100755 --- a/packages/deployment/scripts/integration-test.sh +++ b/packages/deployment/scripts/integration-test.sh @@ -69,7 +69,7 @@ then cp ag-chain-cosmos/data/genesis.json "$RESULTSDIR/genesis.json" cp "$AG_SETUP_COSMOS_HOME/ag-chain-cosmos/data/genesis.json" "$RESULTSDIR/genesis.json" cd "$LOADGEN" - SOLO_COINS=40000000000uist \ + SOLO_COINS=40000000000uist PATH="$thisdir/../bin:$PATH" \ "$AG_SETUP_COSMOS_HOME/faucet-helper.sh" add-egress loadgen "$SOLO_ADDR" SLOGSENDER=@agoric/telemetry/src/otel-trace.js SOLO_SLOGSENDER="" \ SLOGSENDER_FAIL_ON_ERROR=1 SLOGSENDER_AGENT=process \ diff --git a/scripts/run-deployment-integration.sh b/scripts/run-deployment-integration.sh index 251ca22459e..0ec3d0aa51e 100644 --- 
a/scripts/run-deployment-integration.sh +++ b/scripts/run-deployment-integration.sh @@ -11,8 +11,6 @@ export AGORIC_SDK_PATH="${AGORIC_SDK_PATH-$SDK_SRC}" export NETWORK_NAME=chaintest -sudo ln -sf "$SDK_SRC/packages/deployment/bin/ag-setup-cosmos" /usr/local/bin/ag-setup-cosmos - # Note: the deployment test and the loadgen test in testnet mode modify some # directories in $HOME so provide an empty $HOME for them. export HOME="$(mktemp -d -t deployment-integration-home.XXXXX)" From 5320a30a873455764104e13d89131e30a93a238c Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 14 Jul 2023 22:26:54 +0000 Subject: [PATCH 054/109] fix(x/swingset): guard snapshot restore for concurrency --- .../cosmos/x/swingset/keeper/snapshotter.go | 54 ++++++++++++++++--- .../x/swingset/keeper/snapshotter_test.go | 10 ++++ 2 files changed, 57 insertions(+), 7 deletions(-) diff --git a/golang/cosmos/x/swingset/keeper/snapshotter.go b/golang/cosmos/x/swingset/keeper/snapshotter.go index da402b89b80..d0d4b738c64 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter.go @@ -41,6 +41,8 @@ func sanitizeArtifactName(name string) string { } type activeSnapshot struct { + // Whether the operation in progress is a restore + isRestore bool // The block height of the snapshot in progress height int64 // The logger for this snapshot @@ -111,19 +113,33 @@ func NewSwingsetSnapshotter( } } +// checkNotActive returns an error if there is an active snapshot. 
+func (snapshotter *SwingsetSnapshotter) checkNotActive() error { + active := snapshotter.activeSnapshot + if active != nil { + select { + case <-active.done: + snapshotter.activeSnapshot = nil + default: + if active.isRestore { + return fmt.Errorf("snapshot restore already in progress for height %d", active.height) + } else { + return fmt.Errorf("snapshot already in progress for height %d", active.height) + } + } + } + return nil +} + // InitiateSnapshot synchronously initiates a snapshot for the given height. // If a snapshot is already in progress, or if no snapshot manager is configured, // this will fail. // The snapshot operation is performed in a goroutine, and synchronized with the // main thread through the `WaitUntilSnapshotStarted` method. func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { - if snapshotter.activeSnapshot != nil { - select { - case <-snapshotter.activeSnapshot.done: - snapshotter.activeSnapshot = nil - default: - return fmt.Errorf("snapshot already in progress for height %d", snapshotter.activeSnapshot.height) - } + err := snapshotter.checkNotActive() + if err != nil { + return err } if !snapshotter.isConfigured() { @@ -366,6 +382,30 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(height uint64, format u return snapshots.ErrUnknownFormat } + err := snapshotter.checkNotActive() + if err != nil { + return err + } + + // We technically don't need to create an active snapshot here since both + // `InitiateSnapshot` and `RestoreExtension` should only be called from the + // main thread, but it doesn't cost much to add in case things go wrong. + active := &activeSnapshot{ + isRestore: true, + height: int64(height), + logger: snapshotter.logger, + // goroutine synchronization is unnecessary since anything checking should + // be called from the same thread. + // Effectively `WaitUntilSnapshotStarted` would block infinitely + // and `InitiateSnapshot` will error when calling `checkNotActive`.
+ startedResult: nil, + done: nil, + } + snapshotter.activeSnapshot = active + defer func() { + snapshotter.activeSnapshot = nil + }() + ctx := snapshotter.newRestoreContext(int64(height)) exportDir, err := os.MkdirTemp("", fmt.Sprintf("agd-state-sync-restore-%d-*", height)) diff --git a/golang/cosmos/x/swingset/keeper/snapshotter_test.go b/golang/cosmos/x/swingset/keeper/snapshotter_test.go index 0c1474879e3..196dad37eea 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter_test.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter_test.go @@ -2,6 +2,7 @@ package keeper import ( "errors" + "io" "testing" "github.com/Agoric/agoric-sdk/golang/cosmos/vm" @@ -40,6 +41,15 @@ func TestSnapshotInProgress(t *testing.T) { t.Error("wanted error for snapshot in progress") } + err = swingsetSnapshotter.RestoreExtension( + 456, SnapshotFormat, + func() ([]byte, error) { + return nil, io.EOF + }) + if err == nil { + t.Error("wanted error for snapshot in progress") + } + close(ch) <-swingsetSnapshotter.activeSnapshot.done err = swingsetSnapshotter.InitiateSnapshot(456) From c2e2a425856288f4b4a36045672b75cfe2231fd9 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Tue, 18 Jul 2023 21:59:56 +0000 Subject: [PATCH 055/109] refactor(x/swingset): consistent height handling in snapshotter --- .../cosmos/x/swingset/keeper/snapshotter.go | 57 +++++++++++-------- 1 file changed, 34 insertions(+), 23 deletions(-) diff --git a/golang/cosmos/x/swingset/keeper/snapshotter.go b/golang/cosmos/x/swingset/keeper/snapshotter.go index 286253e3b30..9e023c4d56a 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "io" + "math" "os" "path/filepath" "regexp" @@ -44,7 +45,7 @@ type activeSnapshot struct { // Whether the operation in progress is a restore isRestore bool // The block height of the snapshot in progress - height int64 + blockHeight uint64 // The logger for this snapshot logger log.Logger 
// Use to synchronize the commit boundary @@ -77,7 +78,7 @@ type SwingsetSnapshotter struct { type snapshotAction struct { Type string `json:"type"` // COSMOS_SNAPSHOT - BlockHeight int64 `json:"blockHeight"` + BlockHeight uint64 `json:"blockHeight"` Request string `json:"request"` // "initiate", "discard", "retrieve", or "restore" Args []json.RawMessage `json:"args,omitempty"` } @@ -122,9 +123,9 @@ func (snapshotter *SwingsetSnapshotter) checkNotActive() error { snapshotter.activeSnapshot = nil default: if active.isRestore { - return fmt.Errorf("snapshot restore already in progress for height %d", active.height) + return fmt.Errorf("snapshot restore already in progress for height %d", active.blockHeight) } else { - return fmt.Errorf("snapshot already in progress for height %d", active.height) + return fmt.Errorf("snapshot already in progress for height %d", active.blockHeight) } } } @@ -141,18 +142,23 @@ func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { if err != nil { return err } + if height <= 0 { + return fmt.Errorf("block height must not be negative or 0") + } + + blockHeight := uint64(height) if !snapshotter.isConfigured() { return fmt.Errorf("snapshot manager not configured") } - logger := snapshotter.logger.With("height", height) + logger := snapshotter.logger.With("height", blockHeight) // Indicate that a snapshot has been initiated by setting `activeSnapshot`. // This structure is used to synchronize with the goroutine spawned below. // It's nilled-out before exiting (and is the only code that does so). 
active := &activeSnapshot{ - height: height, + blockHeight: blockHeight, logger: logger, startedResult: make(chan error, 1), retrieved: false, @@ -165,7 +171,7 @@ func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { action := &snapshotAction{ Type: "COSMOS_SNAPSHOT", - BlockHeight: height, + BlockHeight: blockHeight, Request: "initiate", } @@ -198,7 +204,7 @@ func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { logger.Error("failed to make swingset snapshot") action = &snapshotAction{ Type: "COSMOS_SNAPSHOT", - BlockHeight: height, + BlockHeight: blockHeight, Request: "discard", } _, err = snapshotter.blockingSend(action, false) @@ -266,7 +272,7 @@ func (snapshotter *SwingsetSnapshotter) SupportedFormats() []uint32 { // This operation is invoked by the snapshot manager in the goroutine started by // `InitiateSnapshot`. // Implements ExtensionSnapshotter -func (snapshotter *SwingsetSnapshotter) SnapshotExtension(height uint64, payloadWriter snapshots.ExtensionPayloadWriter) (err error) { +func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, payloadWriter snapshots.ExtensionPayloadWriter) (err error) { defer func() { // Since the cosmos layers do a poor job of reporting errors, do our own reporting // `err` will be set correctly regardless if it was explicitly assigned or @@ -290,13 +296,13 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(height uint64, payload return errors.New("no active swingset snapshot") } - if activeSnapshot.height != int64(height) { - return fmt.Errorf("swingset snapshot requested for unexpected height %d (expected %d)", height, activeSnapshot.height) + if activeSnapshot.blockHeight != blockHeight { + return fmt.Errorf("swingset snapshot requested for unexpected height %d (expected %d)", blockHeight, activeSnapshot.blockHeight) } action := &snapshotAction{ Type: "COSMOS_SNAPSHOT", - BlockHeight: activeSnapshot.height, + BlockHeight: blockHeight, Request: 
"retrieve", } out, err := snapshotter.blockingSend(action, false) @@ -324,8 +330,8 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(height uint64, payload return err } - if manifest.BlockHeight != height { - return fmt.Errorf("snapshot manifest blockHeight (%d) doesn't match (%d)", manifest.BlockHeight, height) + if manifest.BlockHeight != blockHeight { + return fmt.Errorf("export manifest blockHeight (%d) doesn't match (%d)", manifest.BlockHeight, blockHeight) } writeFileToPayload := func(fileName string, artifactName string) error { @@ -377,11 +383,16 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(height uint64, payload // RestoreExtension restores an extension state snapshot, // the payload reader returns `io.EOF` when it reaches the extension boundaries. // Implements ExtensionSnapshotter -func (snapshotter *SwingsetSnapshotter) RestoreExtension(height uint64, format uint32, payloadReader snapshots.ExtensionPayloadReader) error { +func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, format uint32, payloadReader snapshots.ExtensionPayloadReader) error { if format != SnapshotFormat { return snapshots.ErrUnknownFormat } + if blockHeight > math.MaxInt64 { + return fmt.Errorf("snapshot block height %d is higher than max int64", blockHeight) + } + height := int64(blockHeight) + err := snapshotter.checkNotActive() if err != nil { return err @@ -391,9 +402,9 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(height uint64, format u // `InitiateSnapshot` and `RestoreExtension` should only be called from the // main thread, but it doesn't cost much to add in case things go wrong. active := &activeSnapshot{ - isRestore: true, - height: int64(height), - logger: snapshotter.logger, + isRestore: true, + blockHeight: blockHeight, + logger: snapshotter.logger, // goroutine synchronization is unnecessary since anything checking should // be called from the same thread. 
// Effectively `WaitUntilSnapshotStarted` would block infinitely and @@ -406,16 +417,16 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(height uint64, format u snapshotter.activeSnapshot = nil }() - ctx := snapshotter.newRestoreContext(int64(height)) + ctx := snapshotter.newRestoreContext(height) - exportDir, err := os.MkdirTemp("", fmt.Sprintf("agd-state-sync-restore-%d-*", height)) + exportDir, err := os.MkdirTemp("", fmt.Sprintf("agd-state-sync-restore-%d-*", blockHeight)) if err != nil { return err } defer os.RemoveAll(exportDir) manifest := exportManifest{ - BlockHeight: height, + BlockHeight: blockHeight, Data: ExportDataFilename, } @@ -512,7 +523,7 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(height uint64, format u action := &snapshotAction{ Type: "COSMOS_SNAPSHOT", - BlockHeight: int64(height), + BlockHeight: blockHeight, Request: "restore", Args: []json.RawMessage{encodedExportDir}, } @@ -522,7 +533,7 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(height uint64, format u return err } - snapshotter.logger.Info("restored snapshot", "exportDir", exportDir, "height", height) + snapshotter.logger.Info("restored snapshot", "exportDir", exportDir, "height", blockHeight) return nil } From 7803d3de8e0cba681dfd27dacfc3577eed0bf2f8 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 28 Jul 2023 16:55:34 +0000 Subject: [PATCH 056/109] feat(cosmic-swingset): add JS upgrade plan handler stub --- packages/cosmic-swingset/src/launch-chain.js | 74 ++++++++++++-------- packages/telemetry/src/slog-to-otel.js | 11 +++ 2 files changed, 55 insertions(+), 30 deletions(-) diff --git a/packages/cosmic-swingset/src/launch-chain.js b/packages/cosmic-swingset/src/launch-chain.js index 1e876dc6bc5..912fc145aaa 100644 --- a/packages/cosmic-swingset/src/launch-chain.js +++ b/packages/cosmic-swingset/src/launch-chain.js @@ -689,39 +689,53 @@ export async function launch({ // ); switch (action.type) { case ActionType.AG_COSMOS_INIT: { - const 
{ isBootstrap, blockTime } = action; + const { isBootstrap, upgradePlan, blockTime } = action; // This only runs for the very first block on the chain. - if (!isBootstrap) { - return true; + if (isBootstrap) { + verboseBlocks && blockManagerConsole.info('block bootstrap'); + savedHeight === 0 || + Fail`Cannot run a bootstrap block at height ${savedHeight}`; + const blockHeight = 0; + const runNum = 0; + controller.writeSlogObject({ + type: 'cosmic-swingset-bootstrap-block-start', + blockTime, + }); + controller.writeSlogObject({ + type: 'cosmic-swingset-run-start', + blockHeight, + runNum, + }); + await processAction(action.type, async () => + bootstrapBlock(blockHeight, blockTime), + ); + controller.writeSlogObject({ + type: 'cosmic-swingset-run-finish', + blockHeight, + runNum, + }); + controller.writeSlogObject({ + type: 'cosmic-swingset-bootstrap-block-finish', + blockTime, + }); } - verboseBlocks && blockManagerConsole.info('block bootstrap'); - if (savedHeight !== 0) { - throw Error(`Cannot run a bootstrap block at height ${savedHeight}`); + if (upgradePlan) { + const blockHeight = upgradePlan.height; + if (blockNeedsExecution(blockHeight)) { + controller.writeSlogObject({ + type: 'cosmic-swingset-upgrade-start', + blockHeight, + blockTime, + upgradePlan, + }); + // TODO: Process upgrade plan + controller.writeSlogObject({ + type: 'cosmic-swingset-upgrade-finish', + blockHeight, + blockTime, + }); + } } - const blockHeight = 0; - const runNum = 0; - controller.writeSlogObject({ - type: 'cosmic-swingset-bootstrap-block-start', - blockTime, - }); - controller.writeSlogObject({ - type: 'cosmic-swingset-run-start', - blockHeight, - runNum, - }); - await processAction(action.type, async () => - bootstrapBlock(blockHeight, blockTime), - ); - controller.writeSlogObject({ - type: 'cosmic-swingset-run-finish', - blockHeight, - runNum, - }); - await pendingSwingStoreExport; - controller.writeSlogObject({ - type: 'cosmic-swingset-bootstrap-block-finish', - blockTime, - 
}); return true; } diff --git a/packages/telemetry/src/slog-to-otel.js b/packages/telemetry/src/slog-to-otel.js index 5e42756a8d0..45591d35035 100644 --- a/packages/telemetry/src/slog-to-otel.js +++ b/packages/telemetry/src/slog-to-otel.js @@ -908,6 +908,17 @@ export const makeSlogToOtelKit = (tracer, overrideAttrs = {}) => { dbTransactionManager.end(); break; } + case 'cosmic-swingset-upgrade-start': { + dbTransactionManager.begin(); + assert(!spans.top()); + spans.push(['upgrade', slogAttrs.blockHeight]); + break; + } + case 'cosmic-swingset-upgrade-finish': { + spans.pop(['slogAttrs.blockHeight', slogAttrs.blockHeight]); + dbTransactionManager.end(); + break; + } case 'cosmic-swingset-begin-block': { if (spans.topKind() === 'intra-block') { spans.pop('intra-block'); From 9dbb13c1a06cebe256c8f2ad6ab9ffd5399a971b Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Sun, 23 Jul 2023 13:00:56 -0600 Subject: [PATCH 057/109] ci(deployment-test): separate network and SDK dirs --- .github/workflows/deployment-test.yml | 20 ++++++++++++-------- scripts/run-deployment-integration.sh | 17 +++++++++++------ 2 files changed, 23 insertions(+), 14 deletions(-) diff --git a/.github/workflows/deployment-test.yml b/.github/workflows/deployment-test.yml index 2ee7a5d795c..c32253ee49e 100644 --- a/.github/workflows/deployment-test.yml +++ b/.github/workflows/deployment-test.yml @@ -74,38 +74,42 @@ jobs: set -e cd packages/cosmic-swingset make install + - name: Make networks directory + run: | + set -e + mkdir networks - name: Run integration test - working-directory: ./agoric-sdk + working-directory: ./networks run: | set -xe - DOCKER_VOLUMES="$PWD:/usr/src/agoric-sdk" \ + DOCKER_VOLUMES="$PWD/../agoric-sdk:/usr/src/agoric-sdk" \ LOADGEN=1 \ - packages/deployment/scripts/integration-test.sh + ../agoric-sdk/packages/deployment/scripts/integration-test.sh timeout-minutes: 90 env: NETWORK_NAME: chaintest - name: capture results if: always() - working-directory: ./agoric-sdk + 
working-directory: ./networks run: | NOW=$(date -u +%Y%m%dT%H%M%S) echo "NOW=$NOW" >> "$GITHUB_ENV" # Stop the chain from running. - packages/deployment/scripts/setup.sh play stop || true + ../agoric-sdk/packages/deployment/scripts/setup.sh play stop || true # Get the results. - packages/deployment/scripts/capture-integration-results.sh "${{ job.status == 'failure' }}" + ../agoric-sdk/packages/deployment/scripts/capture-integration-results.sh "${{ job.status == 'failure' }}" # Tear down the nodes. - echo yes | packages/deployment/scripts/setup.sh destroy || true + echo yes | ../agoric-sdk/packages/deployment/scripts/setup.sh destroy || true env: NETWORK_NAME: chaintest - uses: actions/upload-artifact@v3 if: always() with: name: deployment-test-results-${{ env.NOW }} - path: ./agoric-sdk/chaintest/results + path: ./networks/chaintest/results - name: notify on failure if: failure() && github.event_name != 'pull_request' diff --git a/scripts/run-deployment-integration.sh b/scripts/run-deployment-integration.sh index 0ec3d0aa51e..eae1ab555c2 100644 --- a/scripts/run-deployment-integration.sh +++ b/scripts/run-deployment-integration.sh @@ -18,23 +18,28 @@ export HOME="$(mktemp -d -t deployment-integration-home.XXXXX)" # While it'd be great if these [tests were more hermetic](https://github.com/Agoric/agoric-sdk/issues/8059), # this manual runner must currently reset paths relative to the SDK to ensure # reproducible tests. 
-rm -rf "$SDK_SRC/chaintest" "$SDK_SRC/../testnet-load-generator/_agstate/agoric-servers/testnet-8000" +rm -rf "$SDK_SRC/../testnet-load-generator/_agstate/agoric-servers/testnet-8000" + +export OUTPUT_PATH="$SDK_SRC/../deployment-test-results/networks-$(date +%s)" +mkdir -p "$OUTPUT_PATH" cd "$SDK_SRC" sudo ./packages/deployment/scripts/install-deps.sh yarn install && XSNAP_RANDOM_INIT=1 yarn build && make -C packages/cosmic-swingset/ + +cd "$OUTPUT_PATH" # change to "false" to skip extraction on success like in CI testfailure="unknown" DOCKER_VOLUMES="$AGORIC_SDK_PATH:/usr/src/agoric-sdk" \ LOADGEN=1 \ -packages/deployment/scripts/integration-test.sh || { +$SDK_SRC/packages/deployment/scripts/integration-test.sh || { echo "Test failed!!!" testfailure="true" } -packages/deployment/scripts/setup.sh play stop || true -packages/deployment/scripts/capture-integration-results.sh $testfailure -echo yes | packages/deployment/scripts/setup.sh destroy || true +$SDK_SRC/packages/deployment/scripts/setup.sh play stop || true +$SDK_SRC/packages/deployment/scripts/capture-integration-results.sh $testfailure +echo yes | $SDK_SRC/packages/deployment/scripts/setup.sh destroy || true # Not part of CI -scripts/process-integration-results.sh $NETWORK_NAME/results +$SDK_SRC/scripts/process-integration-results.sh $NETWORK_NAME/results From 5dd1e0d92d8ec88b23643050304de1e375789e34 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Tue, 18 Jul 2023 22:07:48 +0000 Subject: [PATCH 058/109] chore(x/swingset): remove disabled restore using untrusted data --- .../cosmos/x/swingset/keeper/snapshotter.go | 56 ++++++++----------- 1 file changed, 23 insertions(+), 33 deletions(-) diff --git a/golang/cosmos/x/swingset/keeper/snapshotter.go b/golang/cosmos/x/swingset/keeper/snapshotter.go index 9e023c4d56a..4a445ce97ce 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter.go @@ -355,13 +355,6 @@ func (snapshotter *SwingsetSnapshotter) 
SnapshotExtension(blockHeight uint64, pa return nil } - if manifest.Data != "" { - err = writeFileToPayload(manifest.Data, UntrustedExportDataArtifactName) - if err != nil { - return err - } - } - for _, artifactInfo := range manifest.Artifacts { artifactName := artifactInfo[0] fileName := artifactInfo[1] @@ -374,6 +367,13 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, pa } } + if manifest.Data != "" { + err = writeFileToPayload(manifest.Data, UntrustedExportDataArtifactName) + if err != nil { + return err + } + } + activeSnapshot.retrieved = true activeSnapshot.logger.Info("retrieved snapshot", "exportDir", exportDir) @@ -417,8 +417,6 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for snapshotter.activeSnapshot = nil }() - ctx := snapshotter.newRestoreContext(height) - exportDir, err := os.MkdirTemp("", fmt.Sprintf("agd-state-sync-restore-%d-*", blockHeight)) if err != nil { return err @@ -427,22 +425,23 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for manifest := exportManifest{ BlockHeight: blockHeight, - Data: ExportDataFilename, } - exportDataFile, err := os.OpenFile(filepath.Join(exportDir, ExportDataFilename), os.O_CREATE|os.O_WRONLY, ExportedFilesMode) - if err != nil { - return err - } - defer exportDataFile.Close() - // Retrieve the SwingStore "ExportData" from the verified vstorage data. // At this point the content of the cosmos DB has been verified against the // AppHash, which means the SwingStore data it contains can be used as the // trusted root against which to validate the artifacts. 
+ ctx := snapshotter.newRestoreContext(height) swingStoreEntries := snapshotter.getSwingStoreExportData(ctx) if len(swingStoreEntries) > 0 { + manifest.Data = ExportDataFilename + exportDataFile, err := os.OpenFile(filepath.Join(exportDir, ExportDataFilename), os.O_CREATE|os.O_WRONLY, ExportedFilesMode) + if err != nil { + return err + } + defer exportDataFile.Close() + encoder := json.NewEncoder(exportDataFile) encoder.SetEscapeHTML(false) for _, dataEntry := range swingStoreEntries { @@ -452,6 +451,12 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for return err } } + + err = exportDataFile.Sync() + if err != nil { + return err + } + exportDataFile.Close() } writeExportFile := func(filename string, data []byte) error { @@ -471,8 +476,7 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for return err } - switch { - case artifact.Name != UntrustedExportDataArtifactName: + if artifact.Name != UntrustedExportDataArtifactName { // Artifact verifiable on import from the export data // Since we cannot trust the state-sync artifact at this point, we generate // a safe and unique filename from the artifact name we received, by @@ -483,30 +487,16 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for filename = fmt.Sprintf("%d-%s", len(manifest.Artifacts), filename) manifest.Artifacts = append(manifest.Artifacts, [2]string{artifact.Name, filename}) err = writeExportFile(filename, artifact.Data) - - case len(swingStoreEntries) > 0: + } else { // Pseudo artifact containing untrusted export data which may have been // saved separately for debugging purposes (not referenced from the manifest) err = writeExportFile(UntrustedExportDataFilename, artifact.Data) - - default: - // There is no trusted export data - err = errors.New("cannot restore from untrusted export data") - // snapshotter.logger.Info("using untrusted export data for swingstore restore") - // _, err = 
exportDataFile.Write(artifact.Data) } - if err != nil { return err } } - err = exportDataFile.Sync() - if err != nil { - return err - } - exportDataFile.Close() - manifestBytes, err := json.MarshalIndent(manifest, "", " ") if err != nil { return err From fa6e935e0e46f5cb78ef34e8c7b082a57b7172a3 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 27 Jul 2023 16:57:50 +0000 Subject: [PATCH 059/109] refactor(deployment): upgrade test provide agd commands --- .../upgrade-test-scripts/env_setup.sh | 20 +++++++++++++++++++ .../upgrade-test-scripts/start_to_to.sh | 11 ++++------ 2 files changed, 24 insertions(+), 7 deletions(-) diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/env_setup.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/env_setup.sh index 7a691aecb7c..150dc4a1a66 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/env_setup.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/env_setup.sh @@ -54,6 +54,26 @@ if [[ "$binary" == "agd" ]]; then sed -i 's/minSubmissionCount": 3/minSubmissionCount": 1/g' /usr/src/agoric-sdk/packages/vats/*.json fi +startAgd() { + agd start --log_level warn "$@" & + AGD_PID=$! + echo $AGD_PID > $HOME/.agoric/agd.pid + wait_for_bootstrap + waitForBlock 2 +} + +killAgd() { + AGD_PID=$(cat $HOME/.agoric/agd.pid) + kill $AGD_PID + rm $HOME/.agoric/agd.pid + wait $AGD_PID || true +} + +waitAgd() { + wait $(cat $HOME/.agoric/agd.pid) + rm $HOME/.agoric/agd.pid +} + provisionSmartWallet() { i="$1" amount="$2" diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/start_to_to.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/start_to_to.sh index bdb316469e0..d8f758b9ed8 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/start_to_to.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/start_to_to.sh @@ -3,7 +3,7 @@ grep -qF 'env_setup.sh' /root/.bashrc || echo ". 
./upgrade-test-scripts/env_setup.sh" >> /root/.bashrc grep -qF 'printKeys' /root/.bashrc || echo "printKeys" >> /root/.bashrc -tmux -V || apt install -y tmux +tmux -V 2>/dev/null || apt-get install -y tmux if [[ "$DEST" == "1" ]] && [[ "$TMUX" == "" ]]; then echo "launching entrypoint" @@ -14,10 +14,7 @@ fi . ./upgrade-test-scripts/env_setup.sh -agd start --log_level warn & -AGD_PID=$! -wait_for_bootstrap -waitForBlock 2 +startAgd if ! test -f "$HOME/.agoric/runActions-${THIS_NAME}"; then runActions "pre_test" @@ -61,9 +58,9 @@ if [[ "$DEST" != "1" ]]; then done sleep 2 - kill $AGD_PID + killAgd echo "ready for upgrade to $UPGRADE_TO" else - wait $AGD_PID + waitAgd fi \ No newline at end of file From f95d613bc9cc84a0696ae76ec298617be7ba5c13 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 28 Jul 2023 17:05:05 +0000 Subject: [PATCH 060/109] refactor(cosmic-swingset): factor out common blockingSend steps --- packages/cosmic-swingset/src/launch-chain.js | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/packages/cosmic-swingset/src/launch-chain.js b/packages/cosmic-swingset/src/launch-chain.js index 912fc145aaa..cb3a78a306a 100644 --- a/packages/cosmic-swingset/src/launch-chain.js +++ b/packages/cosmic-swingset/src/launch-chain.js @@ -674,13 +674,8 @@ export async function launch({ // Handle block related actions // Some actions that are integration specific may be handled by the caller // For example COSMOS_SNAPSHOT is handled in chain-main.js - async function blockingSend(action) { - if (decohered) { - throw decohered; - } - - await afterCommitWorkDone; - + async function doBlockingSend(action) { + await null; // blockManagerConsole.warn( // 'FIGME: blockHeight', // action.blockHeight, @@ -843,7 +838,7 @@ export async function launch({ // We write out our on-chain state as a number of chainSends. 
const start = Date.now(); - await Promise.all([saveChainState(), pendingSwingStoreExport]); + await saveChainState(); chainTime = Date.now() - start; // Advance our saved state variables. @@ -866,6 +861,15 @@ export async function launch({ } } } + async function blockingSend(action) { + if (decohered) { + throw decohered; + } + + await afterCommitWorkDone; + + return doBlockingSend(action).finally(() => pendingSwingStoreExport); + } async function shutdown() { return controller.shutdown(); From 2200e899fe7945ae8b7100edaffe943d913c85f9 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Tue, 18 Jul 2023 22:49:12 +0000 Subject: [PATCH 061/109] chore(x/swingset): better handle snapshot retrieve errors --- .../cosmos/x/swingset/keeper/snapshotter.go | 30 +++++++++++-------- .../x/swingset/keeper/snapshotter_test.go | 4 +-- 2 files changed, 20 insertions(+), 14 deletions(-) diff --git a/golang/cosmos/x/swingset/keeper/snapshotter.go b/golang/cosmos/x/swingset/keeper/snapshotter.go index 4a445ce97ce..09599c414fa 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter.go @@ -50,9 +50,12 @@ type activeSnapshot struct { logger log.Logger // Use to synchronize the commit boundary startedResult chan error - // Internal flag indicating whether the cosmos driven snapshot process completed + // Internal flag indicating whether the snapshot was retrieved // Only read or written by the snapshot worker goroutine. retrieved bool + // Internal plumbing of any error that happen during `SnapshotExtension` + // Only read or written by the snapshot worker goroutine. + retrieveError error // Closed when this snapshot is complete done chan struct{} } @@ -196,20 +199,25 @@ func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { // In production this should indirectly call SnapshotExtension(). 
snapshotter.takeSnapshot(height) - // Check whether the cosmos Snapshot() method successfully handled our extension + // Restore any retrieve error swallowed by `takeSnapshot` + err = active.retrieveError + if err != nil { + logger.Error("failed to make swingset snapshot", "err", err) + } + + // Check whether the JS generated snapshot was retrieved by `SnapshotExtension` if active.retrieved { return } - logger.Error("failed to make swingset snapshot") action = &snapshotAction{ Type: "COSMOS_SNAPSHOT", BlockHeight: blockHeight, Request: "discard", } - _, err = snapshotter.blockingSend(action, false) + _, discardErr := snapshotter.blockingSend(action, false) - if err != nil { + if discardErr != nil { logger.Error("failed to discard swingset snapshot", "err", err) } }() @@ -279,14 +287,12 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, pa // a value was provided to a `return` statement. // See https://go.dev/blog/defer-panic-and-recover for details if err != nil { - var logger log.Logger - if snapshotter.activeSnapshot != nil { - logger = snapshotter.activeSnapshot.logger + activeSnapshot := snapshotter.activeSnapshot + if activeSnapshot != nil { + activeSnapshot.retrieveError = err } else { - logger = snapshotter.logger + snapshotter.logger.Error("swingset snapshot extension failed", "err", err) } - - logger.Error("swingset snapshot extension failed", "err", err) } }() @@ -310,6 +316,7 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, pa if err != nil { return err } + activeSnapshot.retrieved = true var exportDir string err = json.Unmarshal([]byte(out), &exportDir) @@ -374,7 +381,6 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, pa } } - activeSnapshot.retrieved = true activeSnapshot.logger.Info("retrieved snapshot", "exportDir", exportDir) return nil diff --git a/golang/cosmos/x/swingset/keeper/snapshotter_test.go b/golang/cosmos/x/swingset/keeper/snapshotter_test.go 
index 196dad37eea..21e77de531c 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter_test.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter_test.go @@ -138,7 +138,7 @@ func TestRetrievalFails(t *testing.T) { swingsetSnapshotter.takeSnapshot = func(height int64) { // shortcut to the snapshot manager calling the extension savedErr = swingsetSnapshotter.SnapshotExtension(uint64(height), nilWriter) - close(ch) + <-ch } err := swingsetSnapshotter.InitiateSnapshot(123) @@ -150,7 +150,7 @@ func TestRetrievalFails(t *testing.T) { t.Fatal(err) } - <-ch + close(ch) if savedErr == nil { t.Fatal("wanted retrieval error") } From 34a43ee94a4360a36ac107ada4d5406e98f3795f Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 27 Jul 2023 17:02:13 +0000 Subject: [PATCH 062/109] feat(deployment): replicate broken state sync in upgrade test --- .../agoric-upgrade-10/actions.sh | 16 +++++-- .../agoric-upgrade-10/env_setup.sh | 47 +++++++++++++++++++ .../agoric-upgrade-10/test.sh | 8 ++++ 3 files changed, 68 insertions(+), 3 deletions(-) diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/actions.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/actions.sh index 1654578be83..0aa7e992a95 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/actions.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/actions.sh @@ -174,6 +174,16 @@ OFFER=$(mktemp -t agops.XXX) agops vaults close --vaultId vault2 --giveMinted 5.75 --from $USER2ADDR --keyring-backend="test" >|"$OFFER" agops perf satisfaction --from "$USER2ADDR" --executeOffer "$OFFER" --keyring-backend=test -# # TODO test bidding -# # TODO liquidations -# # agops inter bid by-price --price 1 --give 1.0IST --from $GOV1ADDR --keyring-backend test +# replicate state-sync of node +# this will cause the swing-store to prune some data +killAgd +EXPORT_DIR=$(mktemp -t -d swing-store-export-upgrade-10-XXX) 
+make_swing_store_snapshot $EXPORT_DIR || fail "Couldn't make swing-store snapshot" +test_val "$(compare_swing_store_export_data $EXPORT_DIR)" "match" "swing-store export data" +restore_swing_store_snapshot $EXPORT_DIR || fail "Couldn't restore swing-store snapshot" +rm -rf $EXPORT_DIR +startAgd + +# # TODO fully test bidding +# # TODO test liquidations +agops inter bid by-price --price 1 --give 1.0IST --from $GOV1ADDR --keyring-backend test diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh index 298875e91e6..ad484864d3a 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh @@ -106,3 +106,50 @@ submitDeliverInbound() { --chain-id="$CHAINID" -ojson --yes \ --from="$sender" --keyring-backend=test -b block } + +make_swing_store_snapshot() {( set -euo pipefail + EXPORT_DIR="$1" + /usr/src/agoric-sdk/packages/cosmic-swingset/src/export-kernel-db.js --home "$HOME/.agoric" --export-dir "$EXPORT_DIR" --verbose --include-export-data + + EXPORT_MANIFEST_FILE="$EXPORT_DIR/export-manifest.json" + EXPORT_DATA_FILE="$EXPORT_DIR/$(cat "$EXPORT_MANIFEST_FILE" | jq -r .data)" + EXPORT_DATA_UNTRUSTED_FILE="${EXPORT_DATA_FILE%.*}-untrusted.jsonl" + EXPORT_HEIGHT=$(cat "$EXPORT_MANIFEST_FILE" | jq -r .blockHeight) + EXPORT_MANIFEST="$(cat $EXPORT_MANIFEST_FILE)" + + mv "$EXPORT_DATA_FILE" "$EXPORT_DATA_UNTRUSTED_FILE" + agd export --height $EXPORT_HEIGHT | jq -cr '.app_state.vstorage.data[] | if .path | startswith("swingStore.") then [.path[11:],.value] else empty end' > "$EXPORT_DATA_FILE" + + jq -n "$EXPORT_MANIFEST | .untrustedData=\"$(basename -- "$EXPORT_DATA_UNTRUSTED_FILE")\"" > "$EXPORT_MANIFEST_FILE" + + echo "Successful swing-store export for block $EXPORT_HEIGHT" +)} + +restore_swing_store_snapshot() {( set 
-euo pipefail + rm -f $HOME/.agoric/data/agoric/swingstore.sqlite + + /usr/src/agoric-sdk/packages/cosmic-swingset/src/import-kernel-db.js --home "$HOME/.agoric" --export-dir "$1" --verbose +)} + +compare_swing_store_export_data() { + EXPORT_DIR="$1" + EXPORT_MANIFEST_FILE="$EXPORT_DIR/export-manifest.json" + EXPORT_DATA_FILE="$(cat "$EXPORT_MANIFEST_FILE" | jq -r .data)" + EXPORT_DATA_UNTRUSTED_FILE="$(cat "$EXPORT_MANIFEST_FILE" | jq -r .untrustedData)" + + if [ -z "$EXPORT_DATA_FILE" ]; then + echo "missing-export-data" + return + fi + + if [ -z "$EXPORT_DATA_UNTRUSTED_FILE" ]; then + echo "missing-untrusted-export-data" + return + fi + + diff <(cat "$EXPORT_DIR/$EXPORT_DATA_FILE" | sort) <(cat "$EXPORT_DIR/$EXPORT_DATA_UNTRUSTED_FILE" | sort) >&2 && { + echo "match" + } || { + echo "mismatch" + } +} diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/test.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/test.sh index 3f756b3ee77..633716f5b4a 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/test.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/test.sh @@ -35,3 +35,11 @@ test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults. 
test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults.vault2 -o jsonlines | jq -r '.vaultState') "closed" "vault2 is closed" test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults.vault2 -o jsonlines | jq -r '.locked.value') "0" "vault2 contains no collateral" test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults.vault2 -o jsonlines | jq -r '.debtSnapshot.debt.value') "0" "vault2 has no debt" + +# verify state-sync would be broken +killAgd +EXPORT_DIR=$(mktemp -t -d swing-store-export-upgrade-10-XXX) +make_swing_store_snapshot $EXPORT_DIR || fail "Couldn't make swing-store snapshot" +test_val "$(compare_swing_store_export_data $EXPORT_DIR)" "mismatch" "swing-store broken state-sync" +rm -rf $EXPORT_DIR +startAgd \ No newline at end of file From c129663753cc3523491a4ae513bf1eeab1683070 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Mon, 31 Jul 2023 16:11:02 +0000 Subject: [PATCH 063/109] chore: add sequence diagrams of state-sync process --- docs/architecture/state-sync.md | 301 ++++++++++++++++++ .../cosmos/x/swingset/keeper/snapshotter.go | 5 + 2 files changed, 306 insertions(+) create mode 100644 docs/architecture/state-sync.md diff --git a/docs/architecture/state-sync.md b/docs/architecture/state-sync.md new file mode 100644 index 00000000000..d8ce5b13fed --- /dev/null +++ b/docs/architecture/state-sync.md @@ -0,0 +1,301 @@ +# State-sync + +## Creating Snapshot + +```mermaid +sequenceDiagram + box whitesmoke Main goroutine + participant TM as Tendermint + participant A-M as App + participant MS-M as MultiStore + participant SSES-M as SwingSet ExtensionSnapshotter + end + + box whitesmoke App snapshot goroutine + participant SSES-AS as SwingSet ExtensionSnapshotter + participant A-AS as App + participant SM-AS as Snapshot manager + end + + box whitesmoke Cosmos snapshot goroutine + participant SM-CS as Snapshot manager + participant MS-CS as MultiStore + participant SSES-CS as 
SwingSet ExtensionSnapshotter + participant D-CS as Disk + end + + box whitesmoke JS Main process + participant CM as Chain Main + participant D as Disk + end + + box whitesmoke JS Export process + participant SSE as SwingStoreExport + participant Exporter as Exporter + participant D-E as Disk + end + + TM->>+A-M: Commit + A-M->>+SSES-M: WaitUntilSnapshotStarted() + SSES-M-->>-A-M: + A-M->>+CM: COMMIT_BLOCK + CM->>CM: swingStore.commit() + CM-->>-A-M: + A-M->>A-M: BaseApp.CommitWithoutSnapshot() + A-M->>+CM: AFTER_COMMIT_BLOCK + CM-->>-A-M: + A-M->>A-M: isSnapshotHeight: false + A-M-->>-TM: + + TM->>+A-M: BeginBlock + A-M->>+CM: BEGIN_BLOCK + CM-->>-A-M: + A-M-->>-TM: + + TM->>+A-M: EndBlock + A-M->>+CM: END_BLOCK + CM->>CM: runKernel() + CM-)A-M: vstorage->setWithoutNotify(prefixedExportDataEntries) + loop each data entry + A-M->>+MS-M: vstorage.SetStorage() + MS-M-->>-A-M: + end + CM-->>-A-M: + A-M-->>-TM: + + TM->>+A-M: Commit + A-M->>+SSES-M: WaitUntilSnapshotStarted() + SSES-M-->>-A-M: + A-M->>+CM: COMMIT_BLOCK + CM->>CM: swingStore.commit() + CM-->>-A-M: + A-M->>A-M: BaseApp.CommitWithoutSnapshot() + A-M->>+CM: AFTER_COMMIT_BLOCK + CM-->>-A-M: + A-M->>A-M: isSnapshotHeight: true + A-M->>+SSES-M: InitiateSnapshot() + SSES-M->>SSES-M: checkNotActive() + SSES-M->>SSES-M: active = activeSnapshot{} + SSES-M-)+SSES-AS: go + SSES-M-->>-A-M: + A-M-->>-TM: + + par App Snapshot + SSES-AS->>+CM: COSMOS_SNAPSHOT/initiate + CM->>+D: MkDir(exportDir) + D-->>-CM: + CM-)+SSE: initiateSwingStoreExport(exportDir) + CM->>CM: await started
(blocking) + CM-->>-SSES-AS: + alt not initiated + SSES-AS-)SSES-M: startedResult <- err
close(startedResult) + SSES-AS-)SSES-M: done <- err + else initiated + SSES-AS-)SSES-M: close(startedResult) + alt retrieval + SSES-AS->>+A-AS: BaseApp.Snapshot() + A-AS->>+SM-AS: Create() + SM-AS-)+SM-CS: go createSnapshot() + SM-CS->>+MS-CS: Snapshot() + loop each IAVL node + MS-CS->>+SM-CS: WriteMsg() + SM-CS-)SM-AS: chunks <- chunk + SM-CS-->>-MS-CS: + end + MS-CS-->>-SM-CS: + SM-CS->>+SSES-CS: SnapshotExtension() + SSES-CS->>+CM: COSMOS_SNAPSHOT/retrieve + CM->>CM: await done
(blocking) + CM-->>-SSES-CS: exportDir + SSES-CS->>+D-CS: Read(export-manifest.json) + D-CS-->>-SSES-CS: + loop + SSES-CS->>+D-CS: Read(artifactFile) + D-CS-->>-SSES-CS: + SSES-CS->>+SM-CS: payloadWriter(artifact{name, data}) + SM-CS-)SM-AS: chunks <- chunk + SM-CS-->>-SSES-CS: + end + SSES-CS->>+D-CS: Delete(exportDir) + D-CS-->>-SSES-CS: + SSES-CS-->>-SM-CS: + SM-CS-)-SM-AS: close(chunks) + SM-AS->>SM-AS: Save() + SM-AS-->>-A-AS: + A-AS-->>-SSES-AS: + else no retrieval + SSES-AS->>+A-AS: BaseApp.Snapshot() + A-AS-->>-SSES-AS: + SSES-AS->>+CM: COSMOS_SNAPSHOT/discard + CM-)SSE: Stop() + SSE-)CM: done::reject() + CM->>CM: await done + CM->>+D: Delete(exportDir) + D-->-CM: + CM-->>-SSES-AS: + SSES-AS-)SSES-M: done <- err + end + end + SSES-AS-)SSES-M: close(done) + deactivate SSES-AS + end + + par JS SwingStore export + SSE->>Exporter: makeExporter() + Exporter->>SSE: + SSE-)CM: started::resolve() + opt Export Data, not used in state-sync + SSE->>Exporter: getExportData() + Exporter-)SSE: export data iterator + loop each data entry + SSE->>+D-E: Append(export-data.jsonl, "JSON(entry tuple)\n") + D-E-->>-SSE: + end + end + SSE->>Exporter: getArtifactNames() + Exporter--)SSE: names async iterator + loop each artifact name + SSE->>Exporter: getArtifact(name) + Exporter--)SSE: artifactStream + SSE->>+D-E: Write(name, artifactStream) + D-E-->>-SSE: + end + SSE->>+D-E: Write(export-manifest.jsonl, manifest) + D-E-->>-SSE: + SSE-)CM: done::resolve() + deactivate SSE + end + + Note over TM, A-M: BeginBlock, EndBlock + + TM->>+A-M: Commit + A-M->>+SSES-M: WaitUntilSnapshotStarted() + SSES-M->>SSES-M: err = <-startedResult
(blocking) + SSES-M-->>-A-M: + A-M->>+CM: COMMIT_BLOCK + CM->>CM: await started
(blocking) + CM->>CM: swingStore.commit() + CM-->>-A-M: + A-M->>A-M: BaseApp.CommitWithoutSnapshot() + A-M->>+CM: AFTER_COMMIT_BLOCK + CM-->>-A-M: + A-M->>A-M: isSnapshotHeight: false + A-M-->>-TM: +``` + +## Restoring Snapshot + +```mermaid +sequenceDiagram + box whitesmoke Main goroutine + participant TM as Tendermint + participant A-M as BaseApp + participant SM-M as Snapshot Manager + end + + box whitesmoke Cosmos snapshot goroutine + participant SM-CS as Snapshot manager + participant MS-CS as MultiStore + participant SSES-CS as SwingSet ExtensionSnapshotter + participant D-CS as Disk + end + + box whitesmoke JS Main process + participant CM as Chain Main + participant D as Disk + participant SSI as StateSyncImport + participant ISS as importSwingStore + participant SS as SwingStore + end + + TM->>+A-M: OfferSnapshot + A-M->>+SM-M: Restore() + SM-M-)+SM-CS: go restoreSnapshot() + SM-M-->>-A-M: + A-M-->>-TM: + + par Snapshot Restore + SM-CS->>+MS-CS: Restore() + loop IAVL snapshot items + MS-CS->>+SM-CS: protoReader.ReadMsg() + SM-CS->>+SM-M: chunk = <-chunks + SM-M-->>-SM-CS: + SM-CS-->>-MS-CS: + MS-CS->>MS-CS: importer.Add(node) + end + MS-CS-->>-SM-CS: + + opt loop over extensions + SM-CS->>+SSES-CS: RestoreExtension() + SSES-CS->>SSES-CS: checkNotActive() + SSES-CS->>SSES-CS: activeExport = exportOperation{} + SSES-CS->>+D-CS: MkDir(exportDir) + D-CS-->>-SSES-CS: + SSES-CS->>+MS-CS: ExportStorageFromPrefix
("swingStore.") + MS-CS-->>-SSES-CS: vstorage data entries + loop each data entry + SSES-CS->>+D-CS: Append(export-data.jsonl,
"JSON(entry tuple)\n") + D-CS-->>-SSES-CS: + end + loop extension snapshot items + SSES-CS->>+SM-CS: payloadReader() + SM-CS->>+SM-M: chunk = <-chunks + SM-M-->>-SM-CS: + SM-CS-->>-SSES-CS: extension payloadBytes + SSES-CS->>SSES-CS: artifact = parse(payloadBytes) + SSES-CS->>+D-CS: Write(sanitizedFilename, artifact.data) + D-CS-->>-SSES-CS: + end + SSES-CS->>+D-CS: Write(export-manifest.jsonl, manifest) + D-CS-->>-SSES-CS: + SSES-CS->>+CM: COSMOS_SNAPSHOT/restore + CM->>+SSI: performStateSyncImport() + SSI->>+D: Read(export-manifest.json) + D-->>-SSI: + SSI->>+ISS: importSwingStore() + ISS->>ISS: initSwingStore() + ISS->>+SSI: exporter.getExportData() + SSI->>+D: Read(export-data.json) + D-->>-SSI: + SSI-->>-ISS: export data iterator + ISS->>+SS: restore kv and metadata + SS-->>-ISS: + ISS->>+SSI: exporter.getArtifactNames() + SSI--)-ISS: names async iterator + loop each artifact name + ISS->>+SSI: provider.getArtifact() + SSI->>+D: Read(artifactFilename) + D-->>-SSI: + SSI--)-ISS: artifactStream + ISS->>+SS: restore artifact + SS-->>-ISS: + end + ISS-->>-SSI: + SSI->>+SS: set(host.blockHeight) + SS-->>-SSI: + SSI-->>-CM: + CM-->>-SSES-CS: + SSES-CS->>+D-CS: Delete(exportDir) + D-CS-->>-SSES-CS: + SSES-CS-->>-SM-CS: + end + SM-CS-)-SM-M: chRestoreDone <- restoreDone{}
close(chRestoreDone) + end + + TM->>+A-M: ApplySnapshotChunk + A-M->>+SM-M: RestoreChunk() + SM-M->>SM-M: select chRestoreDone, default + alt done (abnormal) + SM-M-->>A-M: false, error + else normal + SM-M-)SM-M: chunks <- chunk + alt chunks remaining + SM-M-->>A-M: false + else last chunk + SM-M->>SM-M: <-chRestoreDone
(blocking) + SM-M-->>-A-M: true + end + end + A-M-->>-TM: + +``` diff --git a/golang/cosmos/x/swingset/keeper/snapshotter.go b/golang/cosmos/x/swingset/keeper/snapshotter.go index 09599c414fa..62df2352951 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter.go @@ -20,6 +20,11 @@ import ( tmproto "github.com/tendermint/tendermint/proto/tendermint/types" ) +// This module implements a Cosmos ExtensionSnapshotter to capture and restore +// state-sync Swingset state that is not part of the Cosmos DB. +// See docs/architecture/state-sync.md for a sequence diagram of how this +// module fits within the state-sync process. + var _ snapshots.ExtensionSnapshotter = &SwingsetSnapshotter{} // SnapshotFormat 1 is a proto message containing an artifact name, and the binary artifact data From fb2d7d115ab733c7b15bb95b9980e65db7170841 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 4 Aug 2023 03:12:21 +0000 Subject: [PATCH 064/109] refactor(x/swingset): better export js-golang interface types --- docs/architecture/state-sync.md | 8 +- .../cosmos/x/swingset/keeper/snapshotter.go | 162 ++++++++++++------ .../x/swingset/keeper/snapshotter_test.go | 9 +- packages/cosmic-swingset/src/chain-main.js | 10 +- .../cosmic-swingset/src/export-kernel-db.js | 6 +- .../cosmic-swingset/src/import-kernel-db.js | 3 +- packages/cosmic-swingset/src/launch-chain.js | 2 +- packages/internal/src/action-types.js | 2 +- 8 files changed, 133 insertions(+), 69 deletions(-) diff --git a/docs/architecture/state-sync.md b/docs/architecture/state-sync.md index d8ce5b13fed..b4a5738dc9b 100644 --- a/docs/architecture/state-sync.md +++ b/docs/architecture/state-sync.md @@ -81,7 +81,7 @@ sequenceDiagram A-M-->>-TM: par App Snapshot - SSES-AS->>+CM: COSMOS_SNAPSHOT/initiate + SSES-AS->>+CM: SWING_STORE_EXPORT/initiate CM->>+D: MkDir(exportDir) D-->>-CM: CM-)+SSE: initiateSwingStoreExport(exportDir) @@ -104,7 +104,7 @@ sequenceDiagram end 
MS-CS-->>-SM-CS: SM-CS->>+SSES-CS: SnapshotExtension() - SSES-CS->>+CM: COSMOS_SNAPSHOT/retrieve + SSES-CS->>+CM: SWING_STORE_EXPORT/retrieve CM->>CM: await done
(blocking) CM-->>-SSES-CS: exportDir SSES-CS->>+D-CS: Read(export-manifest.json) @@ -126,7 +126,7 @@ sequenceDiagram else no retrieval SSES-AS->>+A-AS: BaseApp.Snapshot() A-AS-->>-SSES-AS: - SSES-AS->>+CM: COSMOS_SNAPSHOT/discard + SSES-AS->>+CM: SWING_STORE_EXPORT/discard CM-)SSE: Stop() SSE-)CM: done::reject() CM->>CM: await done @@ -248,7 +248,7 @@ sequenceDiagram end SSES-CS->>+D-CS: Write(export-manifest.jsonl, manifest) D-CS-->>-SSES-CS: - SSES-CS->>+CM: COSMOS_SNAPSHOT/restore + SSES-CS->>+CM: SWING_STORE_EXPORT/restore CM->>+SSI: performStateSyncImport() SSI->>+D: Read(export-manifest.json) D-->>-SSI: diff --git a/golang/cosmos/x/swingset/keeper/snapshotter.go b/golang/cosmos/x/swingset/keeper/snapshotter.go index 62df2352951..b92f4c84055 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter.go @@ -30,12 +30,84 @@ var _ snapshots.ExtensionSnapshotter = &SwingsetSnapshotter{} // SnapshotFormat 1 is a proto message containing an artifact name, and the binary artifact data const SnapshotFormat = 1 -// The manifest filename must be synchronized with the JS export/import tooling +// exportManifest represents the content of the JS swing-store export manifest. +// The export is exchanged between Cosmos and JS using the file system, and only +// the directory containing the export is exchanged with a blockingSend. The +// manifest is a JSON file with the agreed upon file name of +// "export-manifest.json" in the export directory. It contains the file names +// for the "export data" (described in the godoc for exportDataFilename), and +// for the opaque artifacts of the export. +type exportManifest struct { + // BlockHeight is the block height of the manifest. + BlockHeight uint64 `json:"blockHeight,omitempty"` + // Data is the filename of the export data. + Data string `json:"data,omitempty"` + // Artifacts is the list of [artifact name, file name] pairs. 
+ Artifacts [][2]string `json:"artifacts"` +} + +// ExportManifestFilename is the manifest filename which must be synchronized with the JS export/import tooling +// See packages/cosmic-swingset/src/export-kernel-db.js and packages/cosmic-swingset/src/import-kernel-db.js const ExportManifestFilename = "export-manifest.json" -const ExportDataFilename = "export-data.jsonl" + +// For restore operations, the swing-store "export data" is exchanged with the +// JS side as a file which encodes "export data" entries as a sequence of +// [key, value] JSON arrays each terminated by a new line. +// NB: this is not technically jsonlines since the entries are new line +// terminated instead of being new line separated, however the parsers in both +// JS and golang handle such extra whitespace. +const exportDataFilename = "export-data.jsonl" + +// UntrustedExportDataArtifactName is a special artifact name to indicate the +// presence of a synthetic artifact containing untrusted "export data". This +// artifact must not end up in the list of artifacts imported by the JS import +// tooling (which would fail). 
const UntrustedExportDataArtifactName = "UNTRUSTED-EXPORT-DATA" -const UntrustedExportDataFilename = "untrusted-export-data.jsonl" -const ExportedFilesMode = 0644 +const untrustedExportDataFilename = "untrusted-export-data.jsonl" + +const exportedFilesMode = 0644 + +// swingStoreExportActionType is the action type used for all swing-store +// export blockingSend, and synchronized with the JS side in +// packages/internal/src/action-types.js +const swingStoreExportActionType = "SWING_STORE_EXPORT" + +// initiateRequest is the request type for initiating an export +const initiateRequest = "initiate" + +type swingStoreInitiateExportAction struct { + Type string `json:"type"` // "SWING_STORE_EXPORT" + Request string `json:"request"` // "initiate" + BlockHeight uint64 `json:"blockHeight"` // expected blockHeight +} + +// retrieveRequest is the request type for retrieving an initiated export +const retrieveRequest = "retrieve" + +type swingStoreRetrieveExportAction struct { + Type string `json:"type"` // "SWING_STORE_EXPORT" + Request string `json:"request"` // "retrieve" +} +type swingStoreRetrieveResult = string + +// discardRequest is the request type for discarding an initiated but an export +// that was not retrieved +const discardRequest = "discard" + +type swingStoreDiscardExportAction struct { + Type string `json:"type"` // "SWING_STORE_EXPORT" + Request string `json:"request"` // "discard" +} + +// restoreRequest is the request type for restoring an export +const restoreRequest = "restore" + +type swingStoreRestoreExportAction struct { + Type string `json:"type"` // "SWING_STORE_EXPORT" + Request string `json:"request"` // "restore" + BlockHeight uint64 `json:"blockHeight,omitempty"` // empty if deferring blockHeight to the manifest + Args [1]string `json:"args"` // args[1] is the directory in which the export to restore from is located +} var disallowedArtifactNameChar = regexp.MustCompile(`[^-_.a-zA-Z0-9]`) @@ -65,14 +137,6 @@ type activeSnapshot struct { done 
chan struct{} } -type exportManifest struct { - BlockHeight uint64 `json:"blockHeight,omitempty"` - // The filename of the export data - Data string `json:"data,omitempty"` - // The list of artifact names and their corresponding filenames - Artifacts [][2]string `json:"artifacts"` -} - type SwingsetSnapshotter struct { isConfigured func() bool takeSnapshot func(height int64) @@ -84,13 +148,6 @@ type SwingsetSnapshotter struct { activeSnapshot *activeSnapshot } -type snapshotAction struct { - Type string `json:"type"` // COSMOS_SNAPSHOT - BlockHeight uint64 `json:"blockHeight"` - Request string `json:"request"` // "initiate", "discard", "retrieve", or "restore" - Args []json.RawMessage `json:"args,omitempty"` -} - // NewSwingsetSnapshotter creates a SwingsetSnapshotter which exclusively // manages communication with the JS side for Swingset snapshots, ensuring // insensitivity to sub-block timing, and enforcing concurrency requirements. @@ -177,14 +234,14 @@ func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { go func() { defer close(active.done) - action := &snapshotAction{ - Type: "COSMOS_SNAPSHOT", + initiateAction := &swingStoreInitiateExportAction{ + Type: swingStoreExportActionType, BlockHeight: blockHeight, - Request: "initiate", + Request: initiateRequest, } - // blockingSend for COSMOS_SNAPSHOT action is safe to call from a goroutine - _, err := snapshotter.blockingSend(action, false) + // blockingSend for SWING_STORE_EXPORT action is safe to call from a goroutine + _, err = snapshotter.blockingSend(initiateAction, false) if err != nil { // First indicate a snapshot is no longer in progress if the call to @@ -215,15 +272,14 @@ func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { return } - action = &snapshotAction{ - Type: "COSMOS_SNAPSHOT", - BlockHeight: blockHeight, - Request: "discard", + discardAction := &swingStoreDiscardExportAction{ + Type: swingStoreExportActionType, + Request: discardRequest, } - 
_, discardErr := snapshotter.blockingSend(action, false) + _, discardErr := snapshotter.blockingSend(discardAction, false) if discardErr != nil { - logger.Error("failed to discard swingset snapshot", "err", err) + logger.Error("failed to discard swing-store snapshot", "err", err) } }() @@ -311,10 +367,9 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, pa return fmt.Errorf("swingset snapshot requested for unexpected height %d (expected %d)", blockHeight, activeSnapshot.blockHeight) } - action := &snapshotAction{ - Type: "COSMOS_SNAPSHOT", - BlockHeight: blockHeight, - Request: "retrieve", + action := &swingStoreRetrieveExportAction{ + Type: swingStoreExportActionType, + Request: retrieveRequest, } out, err := snapshotter.blockingSend(action, false) @@ -323,7 +378,7 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, pa } activeSnapshot.retrieved = true - var exportDir string + var exportDir swingStoreRetrieveResult err = json.Unmarshal([]byte(out), &exportDir) if err != nil { return err @@ -386,7 +441,7 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, pa } } - activeSnapshot.logger.Info("retrieved snapshot", "exportDir", exportDir) + activeSnapshot.logger.Info("retrieved swing-store export", "exportDir", exportDir) return nil } @@ -446,8 +501,8 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for swingStoreEntries := snapshotter.getSwingStoreExportData(ctx) if len(swingStoreEntries) > 0 { - manifest.Data = ExportDataFilename - exportDataFile, err := os.OpenFile(filepath.Join(exportDir, ExportDataFilename), os.O_CREATE|os.O_WRONLY, ExportedFilesMode) + manifest.Data = exportDataFilename + exportDataFile, err := os.OpenFile(filepath.Join(exportDir, exportDataFilename), os.O_CREATE|os.O_WRONLY, exportedFilesMode) if err != nil { return err } @@ -471,7 +526,7 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for } 
writeExportFile := func(filename string, data []byte) error { - return os.WriteFile(filepath.Join(exportDir, filename), data, ExportedFilesMode) + return os.WriteFile(filepath.Join(exportDir, filename), data, exportedFilesMode) } for { @@ -488,12 +543,14 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for } if artifact.Name != UntrustedExportDataArtifactName { - // Artifact verifiable on import from the export data - // Since we cannot trust the state-sync artifact at this point, we generate - // a safe and unique filename from the artifact name we received, by - // substituting any non letters-digits-hyphen-underscore-dot by a hyphen, - // and prefixing with an incremented id. - // The filename is not used for any purpose in the snapshotting logic. + // An artifact is only verifiable by the JS swing-store import using the + // information contained in the "export data". + // Since we cannot trust the source of the artifact at this point, + // including that the artifact's name is genuine, we generate a safe and + // unique filename from the artifact's name we received, by substituting + // any non letters-digits-hyphen-underscore-dot by a hyphen, and + // prefixing with an incremented id. + // The filename is not used for any purpose in the import logic. 
filename := sanitizeArtifactName(artifact.Name) filename = fmt.Sprintf("%d-%s", len(manifest.Artifacts), filename) manifest.Artifacts = append(manifest.Artifacts, [2]string{artifact.Name, filename}) @@ -501,7 +558,7 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for } else { // Pseudo artifact containing untrusted export data which may have been // saved separately for debugging purposes (not referenced from the manifest) - err = writeExportFile(UntrustedExportDataFilename, artifact.Data) + err = writeExportFile(untrustedExportDataFilename, artifact.Data) } if err != nil { return err @@ -517,16 +574,11 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for return err } - encodedExportDir, err := json.Marshal(exportDir) - if err != nil { - return err - } - - action := &snapshotAction{ - Type: "COSMOS_SNAPSHOT", + action := &swingStoreRestoreExportAction{ + Type: swingStoreExportActionType, BlockHeight: blockHeight, - Request: "restore", - Args: []json.RawMessage{encodedExportDir}, + Request: restoreRequest, + Args: [1]string{exportDir}, } _, err = snapshotter.blockingSend(action, true) diff --git a/golang/cosmos/x/swingset/keeper/snapshotter_test.go b/golang/cosmos/x/swingset/keeper/snapshotter_test.go index 21e77de531c..c54472c529e 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter_test.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter_test.go @@ -100,7 +100,8 @@ func TestSecondCommit(t *testing.T) { func TestInitiateFails(t *testing.T) { swingsetSnapshotter := newTestSnapshotter() swingsetSnapshotter.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) (string, error) { - if action.(*snapshotAction).Request == "initiate" { + initiateAction, ok := action.(*swingStoreInitiateExportAction) + if ok && initiateAction.Request == "initiate" { return "", errors.New("initiate failed") } return "", nil @@ -127,7 +128,8 @@ func TestInitiateFails(t *testing.T) { func TestRetrievalFails(t 
*testing.T) { swingsetSnapshotter := newTestSnapshotter() swingsetSnapshotter.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) (string, error) { - if action.(*snapshotAction).Request == "retrieve" { + retrieveAction, ok := action.(*swingStoreRetrieveExportAction) + if ok && retrieveAction.Request == "retrieve" { return "", errors.New("retrieve failed") } return "", nil @@ -163,7 +165,8 @@ func TestDiscard(t *testing.T) { discardCalled := false swingsetSnapshotter := newTestSnapshotter() swingsetSnapshotter.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) (string, error) { - if action.(*snapshotAction).Request == "discard" { + discardAction, ok := action.(*swingStoreDiscardExportAction) + if ok && discardAction.Request == "discard" { discardCalled = true } return "", nil diff --git a/packages/cosmic-swingset/src/chain-main.js b/packages/cosmic-swingset/src/chain-main.js index 5574687fd2c..6418b07de96 100644 --- a/packages/cosmic-swingset/src/chain-main.js +++ b/packages/cosmic-swingset/src/chain-main.js @@ -494,7 +494,7 @@ export default async function main(progname, args, { env, homedir, agcc }) { /** @type {Awaited>['blockingSend'] | undefined} */ let blockingSend; - async function handleCosmosSnapshot(blockHeight, request, requestArgs) { + async function handleSwingStoreExport(blockHeight, request, requestArgs) { switch (request) { case 'restore': { const exportDir = requestArgs[0]; @@ -646,7 +646,7 @@ export default async function main(progname, args, { env, homedir, agcc }) { } // Snapshot actions are specific to cosmos chains and handled here - case ActionType.COSMOS_SNAPSHOT: { + case ActionType.SWING_STORE_EXPORT: { const { blockHeight, request, args: requestArgs } = action; writeSlogObject?.({ type: 'cosmic-swingset-snapshot-start', @@ -655,7 +655,11 @@ export default async function main(progname, args, { env, homedir, agcc }) { args: requestArgs, }); - const resultP = handleCosmosSnapshot(blockHeight, request, requestArgs); + 
const resultP = handleSwingStoreExport( + blockHeight, + request, + requestArgs, + ); resultP.then( result => { diff --git a/packages/cosmic-swingset/src/export-kernel-db.js b/packages/cosmic-swingset/src/export-kernel-db.js index 337083945b4..677c4f920ab 100755 --- a/packages/cosmic-swingset/src/export-kernel-db.js +++ b/packages/cosmic-swingset/src/export-kernel-db.js @@ -21,6 +21,10 @@ import { makeProcessValue } from './helpers/process-value.js'; /** @typedef {'current' | 'archival' | 'debug'} SwingStoreExportMode */ +// ExportManifestFilename is the manifest filename which must be synchronized +// with the golang SwingSetSnapshotter in golang/cosmos/x/swingset/keeper/snapshotter.go +export const ExportManifestFileName = 'export-manifest.json'; + // eslint-disable-next-line jsdoc/require-returns-check /** * @param {string | undefined} mode @@ -113,7 +117,7 @@ export const initiateSwingStoreExport = ( const cleanup = []; const exportDone = (async () => { - const manifestPath = pathResolve(exportDir, 'export-manifest.json'); + const manifestPath = pathResolve(exportDir, ExportManifestFileName); const manifestFile = await open(manifestPath, 'wx'); cleanup.push(async () => manifestFile.close()); diff --git a/packages/cosmic-swingset/src/import-kernel-db.js b/packages/cosmic-swingset/src/import-kernel-db.js index 4acd7152841..113b5415f90 100755 --- a/packages/cosmic-swingset/src/import-kernel-db.js +++ b/packages/cosmic-swingset/src/import-kernel-db.js @@ -17,6 +17,7 @@ import { importSwingStore } from '@agoric/swing-store'; import { isEntrypoint } from './helpers/is-entrypoint.js'; import { makeProcessValue } from './helpers/process-value.js'; +import { ExportManifestFileName } from './export-kernel-db.js'; /** * @typedef {object} StateSyncImporterOptions @@ -52,7 +53,7 @@ export const performStateSyncImport = async ( return resolvedPath; }; - const manifestPath = safeExportFileResolve('export-manifest.json'); + const manifestPath = 
safeExportFileResolve(ExportManifestFileName); /** @type {Readonly} */ const manifest = await readFile(manifestPath, { encoding: 'utf-8' }).then( data => JSON.parse(data), diff --git a/packages/cosmic-swingset/src/launch-chain.js b/packages/cosmic-swingset/src/launch-chain.js index cb3a78a306a..f7e1531525f 100644 --- a/packages/cosmic-swingset/src/launch-chain.js +++ b/packages/cosmic-swingset/src/launch-chain.js @@ -673,7 +673,7 @@ export async function launch({ // Handle block related actions // Some actions that are integration specific may be handled by the caller - // For example COSMOS_SNAPSHOT is handled in chain-main.js + // For example SWING_STORE_EXPORT is handled in chain-main.js async function doBlockingSend(action) { await null; // blockManagerConsole.warn( diff --git a/packages/internal/src/action-types.js b/packages/internal/src/action-types.js index d6a4b2b5333..a7dec994dff 100644 --- a/packages/internal/src/action-types.js +++ b/packages/internal/src/action-types.js @@ -1,7 +1,7 @@ // @jessie-check export const AG_COSMOS_INIT = 'AG_COSMOS_INIT'; -export const COSMOS_SNAPSHOT = 'COSMOS_SNAPSHOT'; +export const SWING_STORE_EXPORT = 'SWING_STORE_EXPORT'; export const BEGIN_BLOCK = 'BEGIN_BLOCK'; export const CALCULATE_FEES_IN_BEANS = 'CALCULATE_FEES_IN_BEANS'; export const CORE_EVAL = 'CORE_EVAL'; From 8820097299dfe2d12c373aa860a303d483f2ee47 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 4 Aug 2023 03:42:44 +0000 Subject: [PATCH 065/109] chore(x/swingset): update some godoc --- .../cosmos/x/swingset/keeper/snapshotter.go | 64 ++++++++++--------- 1 file changed, 34 insertions(+), 30 deletions(-) diff --git a/golang/cosmos/x/swingset/keeper/snapshotter.go b/golang/cosmos/x/swingset/keeper/snapshotter.go index b92f4c84055..c5533ab6f1b 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter.go @@ -27,7 +27,7 @@ import ( var _ snapshots.ExtensionSnapshotter = &SwingsetSnapshotter{} -// 
SnapshotFormat 1 is a proto message containing an artifact name, and the binary artifact data +// SnapshotFormat 1 defines all extension payloads to be SwingStoreArtifact proto messages const SnapshotFormat = 1 // exportManifest represents the content of the JS swing-store export manifest. @@ -179,6 +179,27 @@ func NewSwingsetSnapshotter( } } +// SnapshotName returns the name of the snapshotter, it should be unique in the manager. +// Implements ExtensionSnapshotter +func (snapshotter *SwingsetSnapshotter) SnapshotName() string { + return types.ModuleName +} + +// SnapshotFormat returns the extension specific format used to encode the +// extension payloads when creating a snapshot. It's independent of the format +// used for the overall state-sync snapshot. +// Implements ExtensionSnapshotter +func (snapshotter *SwingsetSnapshotter) SnapshotFormat() uint32 { + return SnapshotFormat +} + +// SupportedFormats returns a list of extension specific payload formats it can +// restore from. +// Implements ExtensionSnapshotter +func (snapshotter *SwingsetSnapshotter) SupportedFormats() []uint32 { + return []uint32{SnapshotFormat} +} + // checkNotActive returns an error if there is an active snapshot. func (snapshotter *SwingsetSnapshotter) checkNotActive() error { active := snapshotter.activeSnapshot @@ -317,29 +338,13 @@ func (snapshotter *SwingsetSnapshotter) WaitUntilSnapshotStarted() error { return startErr } -// SnapshotName returns the name of snapshotter, it should be unique in the manager. -// Implements ExtensionSnapshotter -func (snapshotter *SwingsetSnapshotter) SnapshotName() string { - return types.ModuleName -} - -// SnapshotFormat returns the default format the extension snapshotter uses to encode the -// payloads when taking a snapshot. -// It's defined within the extension, different from the global format for the whole state-sync snapshot. 
-// Implements ExtensionSnapshotter -func (snapshotter *SwingsetSnapshotter) SnapshotFormat() uint32 { - return SnapshotFormat -} - -// SupportedFormats returns a list of formats it can restore from. -// Implements ExtensionSnapshotter -func (snapshotter *SwingsetSnapshotter) SupportedFormats() []uint32 { - return []uint32{SnapshotFormat} -} - -// SnapshotExtension writes extension payloads into the underlying protobuf stream. -// This operation is invoked by the snapshot manager in the goroutine started by -// `InitiateSnapshot`. +// SnapshotExtension is the method invoked by cosmos to write extension payloads +// into the underlying protobuf stream of the state-sync snapshot. +// This method is invoked by the cosmos snapshot manager in a goroutine it +// started during the call to takeAppSnapshot. However the snapshot manager +// fully synchronizes its goroutine with the goroutine started by this +// SwingsetSnapshotter. +// // Implements ExtensionSnapshotter func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, payloadWriter snapshots.ExtensionPayloadWriter) (err error) { defer func() { @@ -364,7 +369,7 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, pa } if activeSnapshot.blockHeight != blockHeight { - return fmt.Errorf("swingset snapshot requested for unexpected height %d (expected %d)", blockHeight, activeSnapshot.blockHeight) + return fmt.Errorf("swingset extension snapshot requested for unexpected height %d (expected %d)", blockHeight, activeSnapshot.blockHeight) } action := &swingStoreRetrieveExportAction{ @@ -447,7 +452,7 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, pa } // RestoreExtension restores an extension state snapshot, -// the payload reader returns `io.EOF` when it reaches the extension boundaries. +// the payload reader returns io.EOF when it reaches the extension boundaries. 
// Implements ExtensionSnapshotter func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, format uint32, payloadReader snapshots.ExtensionPayloadReader) error { if format != SnapshotFormat { @@ -498,9 +503,9 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for // AppHash, which means the SwingStore data it contains can be used as the // trusted root against which to validate the artifacts. ctx := snapshotter.newRestoreContext(height) - swingStoreEntries := snapshotter.getSwingStoreExportData(ctx) + exportDataEntries := snapshotter.getSwingStoreExportData(ctx) - if len(swingStoreEntries) > 0 { + if len(exportDataEntries) > 0 { manifest.Data = exportDataFilename exportDataFile, err := os.OpenFile(filepath.Join(exportDir, exportDataFilename), os.O_CREATE|os.O_WRONLY, exportedFilesMode) if err != nil { @@ -510,7 +515,7 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for encoder := json.NewEncoder(exportDataFile) encoder.SetEscapeHTML(false) - for _, dataEntry := range swingStoreEntries { + for _, dataEntry := range exportDataEntries { entry := []string{dataEntry.Path, dataEntry.Value} err := encoder.Encode(entry) if err != nil { @@ -522,7 +527,6 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for if err != nil { return err } - exportDataFile.Close() } writeExportFile := func(filename string, data []byte) error { From 5cb29d2f60c2b0f74e5fb7ca2edda32fe687a702 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 4 Aug 2023 06:30:05 +0000 Subject: [PATCH 066/109] chore(x/swingset): rename to swing_store_exports_handler.go --- .../keeper/{snapshotter.go => swing_store_exports_handler.go} | 0 .../{snapshotter_test.go => swing_store_exports_handler_test.go} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename golang/cosmos/x/swingset/keeper/{snapshotter.go => swing_store_exports_handler.go} (100%) rename 
golang/cosmos/x/swingset/keeper/{snapshotter_test.go => swing_store_exports_handler_test.go} (100%) diff --git a/golang/cosmos/x/swingset/keeper/snapshotter.go b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go similarity index 100% rename from golang/cosmos/x/swingset/keeper/snapshotter.go rename to golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go diff --git a/golang/cosmos/x/swingset/keeper/snapshotter_test.go b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler_test.go similarity index 100% rename from golang/cosmos/x/swingset/keeper/snapshotter_test.go rename to golang/cosmos/x/swingset/keeper/swing_store_exports_handler_test.go From e3bd6b5f4e910ae72e94876ca76eb0643f585b0e Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 4 Aug 2023 06:17:01 +0000 Subject: [PATCH 067/109] chore(x/swingset): rename to extension_snapshotter.go --- .../swingset/keeper/{snapshotter.go => extension_snapshotter.go} | 0 .../keeper/{snapshotter_test.go => extension_snapshotter_test.go} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename golang/cosmos/x/swingset/keeper/{snapshotter.go => extension_snapshotter.go} (100%) rename golang/cosmos/x/swingset/keeper/{snapshotter_test.go => extension_snapshotter_test.go} (100%) diff --git a/golang/cosmos/x/swingset/keeper/snapshotter.go b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go similarity index 100% rename from golang/cosmos/x/swingset/keeper/snapshotter.go rename to golang/cosmos/x/swingset/keeper/extension_snapshotter.go diff --git a/golang/cosmos/x/swingset/keeper/snapshotter_test.go b/golang/cosmos/x/swingset/keeper/extension_snapshotter_test.go similarity index 100% rename from golang/cosmos/x/swingset/keeper/snapshotter_test.go rename to golang/cosmos/x/swingset/keeper/extension_snapshotter_test.go From f8acd22381ff3da4682bcb0cdcf71665095506a4 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 4 Aug 2023 04:23:01 +0000 Subject: [PATCH 068/109] feat(x/swingset): add 
WaitUntilSwingStoreExportDone Refactor block/snapshot synchronization --- docs/architecture/state-sync.md | 22 +- golang/cosmos/app/app.go | 4 +- .../cosmos/x/swingset/keeper/snapshotter.go | 326 ++++++++++++------ .../x/swingset/keeper/snapshotter_test.go | 56 ++- 4 files changed, 264 insertions(+), 144 deletions(-) diff --git a/docs/architecture/state-sync.md b/docs/architecture/state-sync.md index b4a5738dc9b..9c214fe2d8f 100644 --- a/docs/architecture/state-sync.md +++ b/docs/architecture/state-sync.md @@ -36,7 +36,7 @@ sequenceDiagram end TM->>+A-M: Commit - A-M->>+SSES-M: WaitUntilSnapshotStarted() + A-M->>+SSES-M: WaitUntilSwingStoreExportStarted() SSES-M-->>-A-M: A-M->>+CM: COMMIT_BLOCK CM->>CM: swingStore.commit() @@ -64,7 +64,7 @@ sequenceDiagram A-M-->>-TM: TM->>+A-M: Commit - A-M->>+SSES-M: WaitUntilSnapshotStarted() + A-M->>+SSES-M: WaitUntilSwingStoreExportStarted() SSES-M-->>-A-M: A-M->>+CM: COMMIT_BLOCK CM->>CM: swingStore.commit() @@ -75,7 +75,7 @@ sequenceDiagram A-M->>A-M: isSnapshotHeight: true A-M->>+SSES-M: InitiateSnapshot() SSES-M->>SSES-M: checkNotActive() - SSES-M->>SSES-M: active = activeSnapshot{} + SSES-M->>SSES-M: activeOperation = operationDetails{} SSES-M-)+SSES-AS: go SSES-M-->>-A-M: A-M-->>-TM: @@ -88,10 +88,10 @@ sequenceDiagram CM->>CM: await started
(blocking) CM-->>-SSES-AS: alt not initiated - SSES-AS-)SSES-M: startedResult <- err
close(startedResult) - SSES-AS-)SSES-M: done <- err + SSES-AS-)SSES-M: exportStartedResult <- err
close(exportStartedResult) + SSES-AS-)SSES-M: exportDone <- err else initiated - SSES-AS-)SSES-M: close(startedResult) + SSES-AS-)SSES-M: close(exportStartedResult) alt retrieval SSES-AS->>+A-AS: BaseApp.Snapshot() A-AS->>+SM-AS: Create() @@ -133,10 +133,10 @@ sequenceDiagram CM->>+D: Delete(exportDir) D-->-CM: CM-->>-SSES-AS: - SSES-AS-)SSES-M: done <- err + SSES-AS-)SSES-M: exportDone <- err end end - SSES-AS-)SSES-M: close(done) + SSES-AS-)SSES-M: close(exportDone) deactivate SSES-AS end @@ -169,8 +169,8 @@ sequenceDiagram Note over TM, A-M: BeginBlock, EndBlock TM->>+A-M: Commit - A-M->>+SSES-M: WaitUntilSnapshotStarted() - SSES-M->>SSES-M: err = <-startedResult
(blocking) + A-M->>+SSES-M: WaitUntilSwingStoreExportStarted() + SSES-M->>SSES-M: err = <-exportStartedResult
(blocking) SSES-M-->>-A-M: A-M->>+CM: COMMIT_BLOCK CM->>CM: await started
(blocking) @@ -228,7 +228,7 @@ sequenceDiagram opt loop over extensions SM-CS->>+SSES-CS: RestoreExtension() SSES-CS->>SSES-CS: checkNotActive() - SSES-CS->>SSES-CS: activeExport = exportOperation{} + SSES-CS->>SSES-CS: activeOperation = operationDetails{} SSES-CS->>+D-CS: MkDir(exportDir) D-CS-->>-SSES-CS: SSES-CS->>+MS-CS: ExportStorageFromPrefix
("swingStore.") diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index 82e28a086ad..3fae93e8d1a 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -954,10 +954,10 @@ func (app *GaiaApp) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci // Commit tells the controller that the block is commited func (app *GaiaApp) Commit() abci.ResponseCommit { - err := app.SwingSetSnapshotter.WaitUntilSnapshotStarted() + err := swingsetkeeper.WaitUntilSwingStoreExportStarted() if err != nil { - app.Logger().Error("swingset snapshot failed to start", "err", err) + app.Logger().Error("swing-store export failed to start", "err", err) } // Frontrun the BaseApp's Commit method diff --git a/golang/cosmos/x/swingset/keeper/snapshotter.go b/golang/cosmos/x/swingset/keeper/snapshotter.go index c5533ab6f1b..ce785c147b6 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter.go @@ -16,6 +16,7 @@ import ( "github.com/cosmos/cosmos-sdk/baseapp" snapshots "github.com/cosmos/cosmos-sdk/snapshots/types" sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/tendermint/tendermint/libs/log" tmproto "github.com/tendermint/tendermint/proto/tendermint/types" ) @@ -118,23 +119,143 @@ func sanitizeArtifactName(name string) string { return disallowedArtifactNameChar.ReplaceAllString(name, "-") } -type activeSnapshot struct { - // Whether the operation in progress is a restore +type operationDetails struct { + // isRestore indicates whether the operation in progress is a restore. + // It is assigned at creation and never mutated. isRestore bool - // The block height of the snapshot in progress + // blockHeight is the block height of this in-progress operation. + // It is assigned at creation and never mutated. blockHeight uint64 - // The logger for this snapshot + // logger is the destination for this operation's log messages. 
+ // It is assigned at creation and never mutated. logger log.Logger - // Use to synchronize the commit boundary - startedResult chan error - // Internal flag indicating whether the snapshot was retrieved - // Only read or written by the snapshot worker goroutine. - retrieved bool + // exportStartedResult is used to synchronize the commit boundary by the + // component performing the export operation to ensure export determinism. + // unused for restore operations + // It is assigned at creation and never mutated. The started goroutine + // writes into the channel and closes it. The main goroutine reads from the + // channel. + exportStartedResult chan error + // exportRetrieved is an internal flag indicating whether the JS generated + // the "retrieve" blockingSend was performed or not, and used to control + // whether to send a "discard" request if the JS side stayed responsible for + // the generated but un-retrieved export. + // It is only read or written by the export operation's goroutine. + exportRetrieved bool // Internal plumbing of any error that happen during `SnapshotExtension` // Only read or written by the snapshot worker goroutine. retrieveError error - // Closed when this snapshot is complete - done chan struct{} + // exportDone is a channel that is closed when the active export operation + // is complete. + // It is assigned at creation and never mutated. The started goroutine + // writes into the channel and closes it. The main goroutine reads from the + // channel. + exportDone chan error +} + +// activeOperation is a global variable reflecting a swing-store import or +// export in progress on the JS side. +// This variable is only assigned to through calls of the public methods of +// SwingsetSnapshotter, which rely on the exportDone channel getting +// closed to nil this variable. +// Only the calls to InitiateSnapshot and RestoreSnapshot set this to a non-nil +// value. 
The goroutine in which these calls occur is referred to as the +// "main goroutine". That goroutine may be different over time, but it's the +// caller's responsibility to ensure those goroutines do not overlap calls to +// the SwingsetSnapshotter public methods. +// See also the details of each field for the conditions under which they are +// accessed. +var activeOperation *operationDetails + +// WaitUntilSwingStoreExportStarted synchronizes with an export operation in +// progress, if any. +// The JS swing-store export must have started before a new block is committed +// to ensure the content of the export is the one expected. The app must call +// this method before sending a commit action to the JS controller. +// +// Waits for a just initiated export operation to have started in its goroutine. +// If no operation is in progress (InitiateSnapshot hasn't been called or +// already completed), or if we previously checked if the operation had started, +// returns immediately. +// +// Must be called by the main goroutine +func WaitUntilSwingStoreExportStarted() error { + operationDetails := activeOperation + if operationDetails == nil { + return nil + } + // Block until the active operation has started, saving the result. + // The operation's goroutine only produces a value in case of an error, + // and closes the channel once the export has started or failed. + // Only the first call after an export was initiated will report an error. + startErr := <-operationDetails.exportStartedResult + + // Check if the active export operation is done, and if so, nil it out so + // future calls are faster. + select { + case <-operationDetails.exportDone: + // If there was a start error, the channel is already closed at this point. + activeOperation = nil + default: + // don't wait for it to finish + // If there is no start error, the operation may take an arbitrary amount + // of time to terminate, likely spanning multiple blocks. 
However this + // function will only ever observe the expected activeOperation since the + // internal checkNotActive() called immediately on InitiateSnapshot will + // nil-out activeOperation if a stale value was sill sitting around. + } + + return startErr +} + +// WaitUntilSwingStoreExportDone synchronizes with the completion of an export +// operation in progress, if any. +// Only a single swing-store operation may execute at a time. Calling +// InitiateSnapshot or RestoreSnapshot will fail if a swing-store operation is +// already in progress. Furthermore, a component may need to know once an +// export it initiated has completed. Once this method call returns, the +// goroutine is guaranteed to have terminated. +// +// Reports any error that may have occurred from InitiateSnapshot. +// If no export operation is in progress (InitiateSnapshot hasn't been called or +// already completed), or if we previously checked if an export had completed, +// returns immediately. +// +// Must be called by the main goroutine +func WaitUntilSwingStoreExportDone() error { + operationDetails := activeOperation + if operationDetails == nil { + return nil + } + // Block until the active export has completed. + // The export operation's goroutine only produces a value in case of an error, + // and closes the channel once the export has completed or failed. + // Only the first call after an export was initiated will report an error. + exportErr := <-operationDetails.exportDone + activeOperation = nil + + return exportErr +} + +// checkNotActive returns an error if there is an active operation. 
+// +// Always internally called by the main goroutine +func checkNotActive() error { + operationDetails := activeOperation + if operationDetails != nil { + select { + case <-operationDetails.exportDone: + // nil-out any stale operation + activeOperation = nil + default: + if operationDetails.isRestore { + return fmt.Errorf("restore operation already in progress for height %d", operationDetails.blockHeight) + } else { + return fmt.Errorf("export operation already in progress for height %d", operationDetails.blockHeight) + } + } + } + return nil } type SwingsetSnapshotter struct { @@ -144,8 +265,6 @@ type SwingsetSnapshotter struct { logger log.Logger getSwingStoreExportData func(ctx sdk.Context) []*vstoragetypes.DataEntry blockingSend func(action vm.Jsonable, mustNotBeInited bool) (string, error) - // Only modified by the main goroutine. - activeSnapshot *activeSnapshot } // NewSwingsetSnapshotter creates a SwingsetSnapshotter which exclusively @@ -175,7 +294,6 @@ func NewSwingsetSnapshotter( logger: app.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName), "submodule", "snapshotter"), getSwingStoreExportData: getSwingStoreExportData, blockingSend: blockingSend, - activeSnapshot: nil, } } @@ -200,31 +318,14 @@ func (snapshotter *SwingsetSnapshotter) SupportedFormats() []uint32 { return []uint32{SnapshotFormat} } -// checkNotActive returns an error if there is an active snapshot. -func (snapshotter *SwingsetSnapshotter) checkNotActive() error { - active := snapshotter.activeSnapshot - if active != nil { - select { - case <-active.done: - snapshotter.activeSnapshot = nil - default: - if active.isRestore { - return fmt.Errorf("snapshot restore already in progress for height %d", active.blockHeight) - } else { - return fmt.Errorf("snapshot already in progress for height %d", active.blockHeight) - } - } - } - return nil -} - -// InitiateSnapshot synchronously initiates a snapshot for the given height. 
-// If a snapshot is already in progress, or if no snapshot manager is configured, -// this will fail. -// The snapshot operation is performed in a goroutine, and synchronized with the -// main thread through the `WaitUntilSnapshotStarted` method. +// InitiateSnapshot initiates a snapshot for the given block height. +// If a snapshot is already in progress, or if no snapshot manager is +// configured, this will fail. +// +// The snapshot operation is performed in a goroutine. +// Use WaitUntilSwingStoreExportStarted to synchronize commit boundaries. func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { - err := snapshotter.checkNotActive() + err := checkNotActive() if err != nil { return err } @@ -240,20 +341,42 @@ func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { logger := snapshotter.logger.With("height", blockHeight) - // Indicate that a snapshot has been initiated by setting `activeSnapshot`. + // Indicate that an export operation has been initiated by setting the global + // activeOperation var. // This structure is used to synchronize with the goroutine spawned below. - // It's nilled-out before exiting (and is the only code that does so). - active := &activeSnapshot{ - blockHeight: blockHeight, - logger: logger, - startedResult: make(chan error, 1), - retrieved: false, - done: make(chan struct{}), + operationDetails := &operationDetails{ + blockHeight: blockHeight, + logger: logger, + exportStartedResult: make(chan error, 1), + exportRetrieved: false, + exportDone: make(chan error, 1), } - snapshotter.activeSnapshot = active + activeOperation = operationDetails go func() { - defer close(active.done) + var err error + var startedErr error + defer func() { + if err == nil { + err = startedErr + } + if err != nil { + operationDetails.exportDone <- err + } + // First, indicate an export is no longer in progress. 
This ensures that + // for an operation with a start error, a call to WaitUntilSwingStoreExportStarted + // waiting on exportStartedResult will always find the operation has + // completed, and clear the active operation instead of racing if the + // channel close order was reversed. + close(operationDetails.exportDone) + // Then signal the current export operation that it failed to start, + // which will be reported to a waiting WaitUntilSwingStoreExportStarted, + // or the next call otherwise. + if startedErr != nil { + operationDetails.exportStartedResult <- startedErr + close(operationDetails.exportStartedResult) + } + }() initiateAction := &swingStoreInitiateExportAction{ Type: swingStoreExportActionType, @@ -262,34 +385,30 @@ func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { } // blockingSend for SWING_STORE_EXPORT action is safe to call from a goroutine - _, err = snapshotter.blockingSend(initiateAction, false) + _, startedErr = snapshotter.blockingSend(initiateAction, false) - if err != nil { - // First indicate a snapshot is no longer in progress if the call to - // `WaitUntilSnapshotStarted` has't happened yet. - // Then signal the current snapshot operation if a call to - // `WaitUntilSnapshotStarted` was already waiting. - active.startedResult <- err - close(active.startedResult) - logger.Error("failed to initiate swingset snapshot", "err", err) + if startedErr != nil { + logger.Error("failed to initiate swing-store export", "err", startedErr) + // The deferred function will communicate the error and close channels + // in the appropriate order. return } - // Signal that the snapshot operation has started in the goroutine. Calls to - // `WaitUntilSnapshotStarted` will no longer block. - close(active.startedResult) + // Signal that the export operation has started successfully in the goroutine. + // Calls to WaitUntilSwingStoreExportStarted will no longer block. 
+ close(operationDetails.exportStartedResult) // In production this should indirectly call SnapshotExtension(). snapshotter.takeSnapshot(height) // Restore any retrieve error swallowed by `takeSnapshot` - err = active.retrieveError + err = activeOperation.retrieveError if err != nil { - logger.Error("failed to make swingset snapshot", "err", err) + logger.Error("failed to process swing-store export", "err", err) } - // Check whether the JS generated snapshot was retrieved by `SnapshotExtension` - if active.retrieved { + // Check whether the JS generated export was retrieved by SnapshotExtension + if operationDetails.exportRetrieved { return } @@ -300,44 +419,21 @@ func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { _, discardErr := snapshotter.blockingSend(discardAction, false) if discardErr != nil { - logger.Error("failed to discard swing-store snapshot", "err", err) + logger.Error("failed to discard swing-store export", "err", err) + } + + if err == nil { + err = discardErr + } else if discardErr != nil { + // Safe to wrap error and use detailed error info since this error + // will not go back into swingset layers + err = sdkerrors.Wrapf(err, "failed to discard swing-store export after failing to process export: %+v", discardErr) } }() return nil } -// WaitUntilSnapshotStarted synchronizes with a snapshot in progress, if any. -// The JS SwingStore export must have started before a new block is committed. -// The app must call this method before sending a commit action to SwingSet. -// -// Waits for a just initiated snapshot to have started in its goroutine. -// If no snapshot is in progress (`InitiateSnapshot` hasn't been called or -// already completed), or if we previously checked if the snapshot had started, -// returns immediately. 
-func (snapshotter *SwingsetSnapshotter) WaitUntilSnapshotStarted() error { - activeSnapshot := snapshotter.activeSnapshot - if activeSnapshot == nil { - return nil - } - // Block until the active snapshot has started, saving the result. - // The snapshot goroutine only produces a value in case of an error, - // and closes the channel once the snapshot has started or failed. - // Only the first call after a snapshot was initiated will report an error. - startErr := <-activeSnapshot.startedResult - - // Check if the active snapshot is done, and if so, nil it out so future - // calls are faster. - select { - case <-activeSnapshot.done: - snapshotter.activeSnapshot = nil - default: - // don't wait for it to finish - } - - return startErr -} - // SnapshotExtension is the method invoked by cosmos to write extension payloads // into the underlying protobuf stream of the state-sync snapshot. // This method is invoked by the cosmos snapshot manager in a goroutine it @@ -353,23 +449,23 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, pa // a value was provided to a `return` statement. 
// See https://go.dev/blog/defer-panic-and-recover for details if err != nil { - activeSnapshot := snapshotter.activeSnapshot - if activeSnapshot != nil { - activeSnapshot.retrieveError = err + operationDetails := activeOperation + if operationDetails != nil { + operationDetails.retrieveError = err } else { snapshotter.logger.Error("swingset snapshot extension failed", "err", err) } } }() - activeSnapshot := snapshotter.activeSnapshot - if activeSnapshot == nil { + operationDetails := activeOperation + if operationDetails == nil { // shouldn't happen, but return an error if it does - return errors.New("no active swingset snapshot") + return errors.New("no active swing-store export operation") } - if activeSnapshot.blockHeight != blockHeight { - return fmt.Errorf("swingset extension snapshot requested for unexpected height %d (expected %d)", blockHeight, activeSnapshot.blockHeight) + if operationDetails.blockHeight != blockHeight { + return fmt.Errorf("swingset extension snapshot requested for unexpected height %d (expected %d)", blockHeight, operationDetails.blockHeight) } action := &swingStoreRetrieveExportAction{ @@ -381,7 +477,7 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, pa if err != nil { return err } - activeSnapshot.retrieved = true + operationDetails.exportRetrieved = true var exportDir swingStoreRetrieveResult err = json.Unmarshal([]byte(out), &exportDir) @@ -446,7 +542,7 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, pa } } - activeSnapshot.logger.Info("retrieved swing-store export", "exportDir", exportDir) + operationDetails.logger.Info("retrieved swing-store export", "exportDir", exportDir) return nil } @@ -464,7 +560,7 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for } height := int64(blockHeight) - err := snapshotter.checkNotActive() + err := checkNotActive() if err != nil { return err } @@ -472,20 +568,20 @@ func (snapshotter 
*SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for // We technically don't need to create an active snapshot here since both // `InitiateSnapshot` and `RestoreExtension` should only be called from the // main thread, but it doesn't cost much to add in case things go wrong. - active := &activeSnapshot{ + operationDetails := &operationDetails{ isRestore: true, blockHeight: blockHeight, logger: snapshotter.logger, // goroutine synchronization is unnecessary since anything checking should - // be called from the same thread. - // Effectively `WaitUntilSnapshotStarted` would block infinitely and - // and `InitiateSnapshot` will error when calling `checkNotActive`. - startedResult: nil, - done: nil, + // be called from the same goroutine. + // Effectively WaitUntilSwingStoreExportStarted would block infinitely and + // exportsHandler.InitiateExport will error when calling checkNotActive. + exportStartedResult: nil, + exportDone: nil, } - snapshotter.activeSnapshot = active + activeOperation = operationDetails defer func() { - snapshotter.activeSnapshot = nil + activeOperation = nil }() exportDir, err := os.MkdirTemp("", fmt.Sprintf("agd-state-sync-restore-%d-*", blockHeight)) diff --git a/golang/cosmos/x/swingset/keeper/snapshotter_test.go b/golang/cosmos/x/swingset/keeper/snapshotter_test.go index c54472c529e..f4f6dd64015 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter_test.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter_test.go @@ -31,7 +31,7 @@ func TestSnapshotInProgress(t *testing.T) { if err != nil { t.Fatal(err) } - err = swingsetSnapshotter.WaitUntilSnapshotStarted() + err = WaitUntilSwingStoreExportStarted() if err != nil { t.Fatal(err) } @@ -51,11 +51,19 @@ func TestSnapshotInProgress(t *testing.T) { } close(ch) - <-swingsetSnapshotter.activeSnapshot.done + err = WaitUntilSwingStoreExportDone() + if err != nil { + t.Fatal(err) + } + err = swingsetSnapshotter.InitiateSnapshot(456) if err != nil { t.Fatal(err) } + err = 
WaitUntilSwingStoreExportDone() + if err != nil { + t.Fatal(err) + } } func TestNotConfigured(t *testing.T) { @@ -77,7 +85,7 @@ func TestSecondCommit(t *testing.T) { } // First run through app.Commit() - err := swingsetSnapshotter.WaitUntilSnapshotStarted() + err := WaitUntilSwingStoreExportStarted() if err != nil { t.Fatal(err) } @@ -87,14 +95,17 @@ func TestSecondCommit(t *testing.T) { } // Second run through app.Commit() - should return right away - err = swingsetSnapshotter.WaitUntilSnapshotStarted() + err = WaitUntilSwingStoreExportStarted() if err != nil { t.Fatal(err) } // close the signaling channel to let goroutine exit close(ch) - <-swingsetSnapshotter.activeSnapshot.done + err = WaitUntilSwingStoreExportDone() + if err != nil { + t.Fatal(err) + } } func TestInitiateFails(t *testing.T) { @@ -111,7 +122,7 @@ func TestInitiateFails(t *testing.T) { if err != nil { t.Fatal(err) } - err = swingsetSnapshotter.WaitUntilSnapshotStarted() + err = WaitUntilSwingStoreExportStarted() if err == nil { t.Fatal("wanted initiation error") } @@ -119,18 +130,24 @@ func TestInitiateFails(t *testing.T) { t.Errorf(`wanted error "initiate failed", got "%s"`, err.Error()) } // another wait should succeed without error - err = swingsetSnapshotter.WaitUntilSnapshotStarted() + err = WaitUntilSwingStoreExportStarted() if err != nil { t.Error(err) } + err = WaitUntilSwingStoreExportDone() + if err != nil { + t.Fatal(err) + } } func TestRetrievalFails(t *testing.T) { swingsetSnapshotter := newTestSnapshotter() + var retrieveError error swingsetSnapshotter.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) (string, error) { retrieveAction, ok := action.(*swingStoreRetrieveExportAction) if ok && retrieveAction.Request == "retrieve" { - return "", errors.New("retrieve failed") + retrieveError = errors.New("retrieve failed") + return "", retrieveError } return "", nil } @@ -147,17 +164,18 @@ func TestRetrievalFails(t *testing.T) { if err != nil { t.Fatal(err) } - err = 
swingsetSnapshotter.WaitUntilSnapshotStarted() + err = WaitUntilSwingStoreExportStarted() if err != nil { t.Fatal(err) } close(ch) - if savedErr == nil { - t.Fatal("wanted retrieval error") + if savedErr != retrieveError { + t.Errorf(`wanted retrieval error, got "%v"`, savedErr) } - if savedErr.Error() != "retrieve failed" { - t.Errorf(`wanted error "retrieve failed", got "%s"`, savedErr.Error()) + err = WaitUntilSwingStoreExportDone() + if err != retrieveError { + t.Errorf(`wanted retrieval error, got "%v"`, err) } } @@ -174,13 +192,16 @@ func TestDiscard(t *testing.T) { // simulate a normal Snapshot() call which calls SnapshotExtension() swingsetSnapshotter.takeSnapshot = func(height int64) { - swingsetSnapshotter.activeSnapshot.retrieved = true + activeOperation.exportRetrieved = true } err := swingsetSnapshotter.InitiateSnapshot(123) if err != nil { t.Fatal(err) } - <-swingsetSnapshotter.activeSnapshot.done + err = WaitUntilSwingStoreExportDone() + if err != nil { + t.Fatal(err) + } if discardCalled { t.Error("didn't want discard called") } @@ -191,7 +212,10 @@ func TestDiscard(t *testing.T) { if err != nil { t.Fatal(err) } - <-swingsetSnapshotter.activeSnapshot.done + err = WaitUntilSwingStoreExportDone() + if err != nil { + t.Fatal(err) + } if !discardCalled { t.Error("wanted discard called") } From 4b2357e4efdf4a3a59dba369ccbea43f86fa9e1d Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 4 Aug 2023 06:31:15 +0000 Subject: [PATCH 069/109] chore(x/swingset): remove ExtensionSnapshotter from swing_store_exports_handler.go --- .../keeper/swing_store_exports_handler.go | 364 +++--------------- .../swing_store_exports_handler_test.go | 98 ----- .../cosmic-swingset/src/export-kernel-db.js | 2 +- 3 files changed, 61 insertions(+), 403 deletions(-) diff --git a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go index 71315d11924..45ca40b31d3 100644 --- 
a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go +++ b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go @@ -1,12 +1,10 @@ package keeper import ( - "bytes" "encoding/json" "errors" "fmt" "io" - "math" "os" "path/filepath" "regexp" @@ -14,24 +12,70 @@ import ( "github.com/Agoric/agoric-sdk/golang/cosmos/vm" "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types" vstoragetypes "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types" - "github.com/cosmos/cosmos-sdk/baseapp" - snapshots "github.com/cosmos/cosmos-sdk/snapshots/types" - sdk "github.com/cosmos/cosmos-sdk/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/tendermint/tendermint/libs/log" - tmproto "github.com/tendermint/tendermint/proto/tendermint/types" ) -// This module implements a Cosmos ExtensionSnapshotter to capture and restore -// state-sync Swingset state that is not part of the Cosmos DB. -// See docs/architecture/state-sync.md for a sequence diagram of how this -// module fits within the state-sync process. - -var _ snapshots.ExtensionSnapshotter = &ExtensionSnapshotter{} -var _ SwingStoreExportEventHandler = &ExtensionSnapshotter{} - -// SnapshotFormat 1 defines all extension payloads to be SwingStoreArtifact proto messages -const SnapshotFormat = 1 +// This module abstracts the generation and handling of swing-store exports, +// including the communication with the JS side to generate and restore them. +// +// Its interface derives from the following requirements: +// - Multiple golang components may perform swing-store export or import +// operations, but the JS side does not support concurrent operations as +// there are no legitimate use cases. +// - Some components cannot block the main execution while performing an export +// operation. In particular, cosmos's state-sync snapshot process cannot +// block the logic handling tendermint events. +// - The JS swing-store cannot access historical states. 
To generate +// deterministic exports, the export operations that cannot block must be able +// to synchronize with commit points that will change the JS swing-store. +// - The JS swing-store export logic does however support mutation of the +// JS swing-store state after an export operation has started. Such mutations +// do not affect the export that is produced, and can span multiple blocks. +// - This implies the commit synchronization is only necessary until the JS +// side of the export operation has started. +// - Some components, in particular state-sync, may need to perform other work +// alongside generating a swing-store export. This work similarly cannot block +// the main execution, but must allow for the swing-store synchronization +// that enables generating deterministic export. For state-sync, this work +// happens before the generated swing-store export can be consumed. +// +// The general approach taken is to implement a SwingStoreExportsHandler that +// implements the communication with the JS side, enforces that no concurrent +// operations take place, defers the consumption of the export to a provided +// SwingStoreExportEventHandler, and provides some synchronization methods to +// let the application enforce mutation boundaries. +// +// There should be a single SwingStoreExportsHandler instance, and all its method +// calls should be performed from the same goroutine (no mutex enforcement). +// +// The process of generating a SwingStore export proceeds as follow: +// - The component invokes swingStoreExportsHandler.InitiateExport with an +// eventHandler for the export. +// - InitiateExport verifies no other export operation is in progress and +// starts a goroutine to perform the export operation. It requests the JS +// side to start generating an export of the swing-store, and calls the +// eventHandler's OnExportStarted method with a function param allowing it to +// retrieve the export. 
+// - The cosmos app will call WaitUntilSwingStoreExportStarted before +// instructing the JS controller to commit its work, satisfying the +// deterministic exports requirement. +// - OnExportStarted must call the retrieve function before returning, however +// it may perform other work before. For cosmos state-sync snapshots, +// OnExportStarted will call app.Snapshot which will invoke the swingset +// module's ExtensionSnapshotter that will retrieve and process the +// swing-store export. +// - When the retrieve function is called, it blocks until the JS export is +// ready, then creates a SwingStoreExportProvider that abstract accessing +// the content of the export. The eventHandler's OnExportRetrieved is called +// with the export provider. +// - OnExportRetrieved reads the export using the provider. +// +// Restoring a swing-store export does not have similar non-blocking requirements. +// The component simply invokes swingStoreExportHandler.RestoreExport with a +// SwingStoreExportProvider representing the swing-store export to +// be restored, and RestoreExport will consume it and block until the JS side +// has completed the restore before returning. // exportManifest represents the content of the JS swing-store export manifest. // The export is exchanged between Cosmos and JS using the file system, and only @@ -260,23 +304,6 @@ func checkNotActive() error { return nil } -// snapshotDetails describes an in-progress state-sync snapshot -type snapshotDetails struct { - // blockHeight is the block height of this in-progress snapshot. - blockHeight uint64 - // logger is the destination for this snapshot's log messages. - logger log.Logger - // retrieveExport is the callback provided by the SwingStoreExportsHandler to - // retrieve the SwingStore's export provider which allows to read the export's - // artifacts used to populate this state-sync extension's payloads. 
- retrieveExport func() error - // payloadWriter is the callback provided by the state-sync snapshot manager - // for an extension to write a payload into the under-construction snapshot - // stream. It may be called multiple times, and often is (currently once per - // SwingStore export artifact). - payloadWriter snapshots.ExtensionPayloadWriter -} - // SwingStoreExportProvider gives access to a SwingStore "export data" and the // related artifacts. // A JS swing-store export is composed of optional "export data" (a set of @@ -325,76 +352,6 @@ type SwingStoreExportEventHandler interface { OnExportRetrieved(provider SwingStoreExportProvider) error } -// ExtensionSnapshotter is the cosmos state-sync extension snapshotter for the -// x/swingset module. -// It handles the SwingSet state that is not part of the Cosmos DB. Currently -// that state is solely composed of the SwingStore artifacts, as a copy of the -// SwingStore "export data" is streamed into the cosmos DB during execution. -// When performing a snapshot, the extension leverages the SwingStoreExportsHandler -// to retrieve the needed SwingStore artifacts. When restoring a snapshot, -// the extension combines the artifacts from the state-sync snapshot with the -// SwingStore "export data" from the already restored cosmos DB, to produce a -// full SwingStore export that can be imported to create a new JS swing-store DB. -// -// Since swing-store is not able to open its DB at historical commit points, -// the export operation must start before new changes are committed, aka before -// Swingset is instructed to commit the next block. For that reason the cosmos -// snapshot operation is currently mediated by the SwingStoreExportsHandler, -// which helps with the synchronization needed to generate consistent exports, -// while allowing SwingSet activity to proceed for the next block. This relies -// on the application calling WaitUntilSwingStoreExportStarted before -// instructing SwingSet to commit a new block. 
-type ExtensionSnapshotter struct { - isConfigured func() bool - // takeAppSnapshot is called by OnExportStarted when creating a snapshot - takeAppSnapshot func(height int64) - newRestoreContext func(height int64) sdk.Context - swingStoreExportsHandler *SwingStoreExportsHandler - getSwingStoreExportDataShadowCopy func(ctx sdk.Context) []*vstoragetypes.DataEntry - logger log.Logger - activeSnapshot *snapshotDetails -} - -// NewExtensionSnapshotter creates a new swingset ExtensionSnapshotter -func NewExtensionSnapshotter( - app *baseapp.BaseApp, - swingStoreExportsHandler *SwingStoreExportsHandler, - getSwingStoreExportDataShadowCopy func(ctx sdk.Context) []*vstoragetypes.DataEntry, -) *ExtensionSnapshotter { - return &ExtensionSnapshotter{ - isConfigured: func() bool { return app.SnapshotManager() != nil }, - takeAppSnapshot: app.Snapshot, - newRestoreContext: func(height int64) sdk.Context { - return app.NewUncachedContext(false, tmproto.Header{Height: height}) - }, - logger: app.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName), "submodule", "extension snapshotter"), - swingStoreExportsHandler: swingStoreExportsHandler, - getSwingStoreExportDataShadowCopy: getSwingStoreExportDataShadowCopy, - activeSnapshot: nil, - } -} - -// SnapshotName returns the name of the snapshotter, it should be unique in the manager. -// Implements ExtensionSnapshotter -func (snapshotter *ExtensionSnapshotter) SnapshotName() string { - return types.ModuleName -} - -// SnapshotFormat returns the extension specific format used to encode the -// extension payloads when creating a snapshot. It's independent of the format -// used for the overall state-sync snapshot. -// Implements ExtensionSnapshotter -func (snapshotter *ExtensionSnapshotter) SnapshotFormat() uint32 { - return SnapshotFormat -} - -// SupportedFormats returns a list of extension specific payload formats it can -// restore from. 
-// Implements ExtensionSnapshotter -func (snapshotter *ExtensionSnapshotter) SupportedFormats() []uint32 { - return []uint32{SnapshotFormat} -} - // SwingStoreExportsHandler exclusively manages the communication with the JS side // related to swing-store exports, ensuring insensitivity to sub-block timing, // and enforcing concurrency requirements. @@ -422,25 +379,6 @@ func NewSwingStoreExportsHandler(logger log.Logger, blockingSend func(action vm. } } -// InitiateSnapshot initiates a snapshot for the given block height. -// If a snapshot is already in progress, or if no snapshot manager is -// configured, this will fail. -// -// The snapshot operation is performed in a goroutine. -// Use WaitUntilSwingStoreExportStarted to synchronize commit boundaries. -func (snapshotter *ExtensionSnapshotter) InitiateSnapshot(height int64) error { - if !snapshotter.isConfigured() { - return fmt.Errorf("snapshot manager not configured") - } - if height <= 0 { - return fmt.Errorf("block height must not be negative or 0") - } - - blockHeight := uint64(height) - - return snapshotter.swingStoreExportsHandler.InitiateExport(blockHeight, snapshotter) -} - // InitiateExport synchronously verifies that there is not already an export or // import operation in progress and initiates a new export in a goroutine, // via a dedicated SWING_STORE_EXPORT blockingSend action independent of other @@ -572,72 +510,6 @@ func (exportsHandler SwingStoreExportsHandler) InitiateExport(blockHeight uint64 return nil } -// OnExportStarted performs the actual cosmos state-sync app snapshot. -// The cosmos implementation will ultimately call SnapshotExtension, which can -// retrieve and process the SwingStore artifacts. -// This method is invoked by the SwingStoreExportsHandler in a goroutine -// started by InitiateExport, only if no other SwingStore export operation is -// already in progress. 
-// -// Implements SwingStoreExportEventHandler -func (snapshotter *ExtensionSnapshotter) OnExportStarted(blockHeight uint64, retrieveExport func() error) error { - logger := snapshotter.logger.With("height", blockHeight) - - if blockHeight > math.MaxInt64 { - return fmt.Errorf("snapshot block height %d is higher than max int64", blockHeight) - } - height := int64(blockHeight) - - // We assume SwingStoreSnapshotter correctly guarded against concurrent snapshots - snapshotDetails := snapshotDetails{ - blockHeight: blockHeight, - logger: logger, - retrieveExport: retrieveExport, - } - snapshotter.activeSnapshot = &snapshotDetails - - snapshotter.takeAppSnapshot(height) - - snapshotter.activeSnapshot = nil - - // Unfortunately Cosmos BaseApp.Snapshot() does not report its errors. - return nil -} - -// SnapshotExtension is the method invoked by cosmos to write extension payloads -// into the underlying protobuf stream of the state-sync snapshot. -// This method is invoked by the cosmos snapshot manager in a goroutine it -// started during the call to OnExportStarted. However the snapshot manager -// fully synchronizes its goroutine with the goroutine started by the -// SwingStoreSnapshotter, making it safe to invoke callbacks of the -// SwingStoreSnapshotter. SnapshotExtension actually delegates writing -// extension payloads to OnExportRetrieved. -// -// Implements ExtensionSnapshotter -func (snapshotter *ExtensionSnapshotter) SnapshotExtension(blockHeight uint64, payloadWriter snapshots.ExtensionPayloadWriter) error { - logError := func(err error) error { - // The cosmos layers do a poor job of reporting errors, however - // SwingStoreExportsHandler arranges to report retrieve errors swallowed by - // takeAppSnapshot, so we manually report unexpected errors. 
- snapshotter.logger.Error("swingset snapshot extension failed", "err", err) - return err - } - - snapshotDetails := snapshotter.activeSnapshot - if snapshotDetails == nil { - // shouldn't happen, but return an error if it does - return logError(errors.New("no active swingset snapshot")) - } - - if snapshotDetails.blockHeight != blockHeight { - return logError(fmt.Errorf("swingset extension snapshot requested for unexpected height %d (expected %d)", blockHeight, snapshotDetails.blockHeight)) - } - - snapshotDetails.payloadWriter = payloadWriter - - return snapshotDetails.retrieveExport() -} - // retrieveExport retrieves an initiated export then invokes onExportRetrieved // with the retrieved export. // @@ -765,122 +637,6 @@ func (exportsHandler SwingStoreExportsHandler) retrieveExport(onExportRetrieved return nil } -// OnExportRetrieved handles the SwingStore export retrieved by the SwingStoreExportsHandler -// and writes it out to the SnapshotExtension's payloadWriter. -// This operation is invoked by the SwingStoreExportsHandler in the snapshot -// manager goroutine synchronized with SwingStoreExportsHandler's own goroutine. 
-// -// Implements SwingStoreExportEventHandler -func (snapshotter *ExtensionSnapshotter) OnExportRetrieved(provider SwingStoreExportProvider) error { - snapshotDetails := snapshotter.activeSnapshot - if snapshotDetails == nil || snapshotDetails.payloadWriter == nil { - // shouldn't happen, but return an error if it does - return errors.New("no active swingset snapshot") - } - - if snapshotDetails.blockHeight != provider.BlockHeight { - return fmt.Errorf("SwingStore export received for unexpected block height %d (app snapshot height is %d)", provider.BlockHeight, snapshotDetails.blockHeight) - } - - writeArtifactToPayload := func(artifact types.SwingStoreArtifact) error { - payloadBytes, err := artifact.Marshal() - if err != nil { - return err - } - - err = snapshotDetails.payloadWriter(payloadBytes) - if err != nil { - return err - } - - return nil - } - - for { - artifact, err := provider.ReadArtifact() - if err == io.EOF { - break - } else if err != nil { - return err - } - - err = writeArtifactToPayload(artifact) - if err != nil { - return err - } - } - - swingStoreExportDataEntries, err := provider.GetExportData() - if err != nil { - return err - } - if len(swingStoreExportDataEntries) == 0 { - return nil - } - - // For debugging, write out any retrieved export data as a single untrusted artifact - // which has the same encoding as the internal SwingStore export data representation: - // a sequence of [key, value] JSON arrays each terminated by a new line. 
- exportDataArtifact := types.SwingStoreArtifact{Name: UntrustedExportDataArtifactName} - - var encodedExportData bytes.Buffer - encoder := json.NewEncoder(&encodedExportData) - encoder.SetEscapeHTML(false) - for _, dataEntry := range swingStoreExportDataEntries { - entry := []string{dataEntry.Path, dataEntry.Value} - err := encoder.Encode(entry) - if err != nil { - return err - } - } - exportDataArtifact.Data = encodedExportData.Bytes() - - err = writeArtifactToPayload(exportDataArtifact) - encodedExportData.Reset() - if err != nil { - return err - } - return nil -} - -// RestoreExtension restores an extension state snapshot, -// the payload reader returns io.EOF when it reaches the extension boundaries. -// Implements ExtensionSnapshotter -func (snapshotter *ExtensionSnapshotter) RestoreExtension(blockHeight uint64, format uint32, payloadReader snapshots.ExtensionPayloadReader) error { - if format != SnapshotFormat { - return snapshots.ErrUnknownFormat - } - - if blockHeight > math.MaxInt64 { - return fmt.Errorf("snapshot block height %d is higher than max int64", blockHeight) - } - height := int64(blockHeight) - - // Retrieve the SwingStore "ExportData" from the verified vstorage data. - // At this point the content of the cosmos DB has been verified against the - // AppHash, which means the SwingStore data it contains can be used as the - // trusted root against which to validate the artifacts. 
- getExportData := func() ([]*vstoragetypes.DataEntry, error) { - ctx := snapshotter.newRestoreContext(height) - exportData := snapshotter.getSwingStoreExportDataShadowCopy(ctx) - return exportData, nil - } - - readArtifact := func() (artifact types.SwingStoreArtifact, err error) { - payloadBytes, err := payloadReader() - if err != nil { - return artifact, err - } - - err = artifact.Unmarshal(payloadBytes) - return artifact, err - } - - return snapshotter.swingStoreExportsHandler.RestoreExport( - SwingStoreExportProvider{BlockHeight: blockHeight, GetExportData: getExportData, ReadArtifact: readArtifact}, - ) -} - // RestoreExport restores the JS swing-store using previously exported data and artifacts. // // Must be called by the main goroutine diff --git a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler_test.go b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler_test.go index 884b286e566..32f12a6f6d7 100644 --- a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler_test.go +++ b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler_test.go @@ -6,20 +6,9 @@ import ( "testing" "github.com/Agoric/agoric-sdk/golang/cosmos/vm" - sdk "github.com/cosmos/cosmos-sdk/types" "github.com/tendermint/tendermint/libs/log" ) -func newTestExtensionSnapshotter() *ExtensionSnapshotter { - logger := log.NewNopLogger() // log.NewTMLogger(log.NewSyncWriter( /* os.Stdout*/ io.Discard)).With("module", "sdk/app") - return &ExtensionSnapshotter{ - isConfigured: func() bool { return true }, - newRestoreContext: func(height int64) sdk.Context { return sdk.Context{} }, - logger: logger, - swingStoreExportsHandler: newTestSwingStoreExportsHandler(), - } -} - func newTestSwingStoreExportsHandler() *SwingStoreExportsHandler { logger := log.NewNopLogger() // log.NewTMLogger(log.NewSyncWriter( /* os.Stdout*/ io.Discard)).With("module", "sdk/app") return &SwingStoreExportsHandler{ @@ -61,51 +50,6 @@ func (taker testSwingStoreEventHandler) 
OnExportRetrieved(provider SwingStoreExp return taker.onExportRetrieved(provider) } -func TestExtensionSnapshotterInProgress(t *testing.T) { - extensionSnapshotter := newTestExtensionSnapshotter() - ch := make(chan struct{}) - extensionSnapshotter.takeAppSnapshot = func(height int64) { - <-ch - } - err := extensionSnapshotter.InitiateSnapshot(123) - if err != nil { - t.Fatal(err) - } - err = WaitUntilSwingStoreExportStarted() - if err != nil { - t.Fatal(err) - } - - err = extensionSnapshotter.InitiateSnapshot(456) - if err == nil { - t.Error("wanted error for snapshot in progress") - } - - err = extensionSnapshotter.RestoreExtension( - 456, SnapshotFormat, - func() ([]byte, error) { - return nil, io.EOF - }) - if err == nil { - t.Error("wanted error for snapshot in progress") - } - - close(ch) - err = WaitUntilSwingStoreExportDone() - if err != nil { - t.Fatal(err) - } - - err = extensionSnapshotter.InitiateSnapshot(456) - if err != nil { - t.Fatal(err) - } - err = WaitUntilSwingStoreExportDone() - if err != nil { - t.Fatal(err) - } -} - func TestSwingStoreSnapshotterInProgress(t *testing.T) { exportsHandler := newTestSwingStoreExportsHandler() ch := make(chan struct{}) @@ -148,48 +92,6 @@ func TestSwingStoreSnapshotterInProgress(t *testing.T) { } } -func TestExtensionSnapshotterNotConfigured(t *testing.T) { - extensionSnapshotter := newTestExtensionSnapshotter() - extensionSnapshotter.isConfigured = func() bool { return false } - err := extensionSnapshotter.InitiateSnapshot(123) - if err == nil { - t.Error("wanted error for unconfigured snapshot manager") - } -} - -func TestExtensionSnapshotterSecondCommit(t *testing.T) { - extensionSnapshotter := newTestExtensionSnapshotter() - - // Use a channel to block the snapshot goroutine after it has started but before it exits. 
- ch := make(chan struct{}) - extensionSnapshotter.takeAppSnapshot = func(height int64) { - <-ch - } - - // First run through app.Commit() - err := WaitUntilSwingStoreExportStarted() - if err != nil { - t.Fatal(err) - } - err = extensionSnapshotter.InitiateSnapshot(123) - if err != nil { - t.Fatal(err) - } - - // Second run through app.Commit() - should return right away - err = WaitUntilSwingStoreExportStarted() - if err != nil { - t.Fatal(err) - } - - // close the signaling channel to let goroutine exit - close(ch) - err = WaitUntilSwingStoreExportDone() - if err != nil { - t.Fatal(err) - } -} - func TestSwingStoreSnapshotterSecondCommit(t *testing.T) { exportsHandler := newTestSwingStoreExportsHandler() diff --git a/packages/cosmic-swingset/src/export-kernel-db.js b/packages/cosmic-swingset/src/export-kernel-db.js index 677c4f920ab..970be626a0d 100755 --- a/packages/cosmic-swingset/src/export-kernel-db.js +++ b/packages/cosmic-swingset/src/export-kernel-db.js @@ -22,7 +22,7 @@ import { makeProcessValue } from './helpers/process-value.js'; /** @typedef {'current' | 'archival' | 'debug'} SwingStoreExportMode */ // ExportManifestFilename is the manifest filename which must be synchronized -// with the golang SwingSetSnapshotter in golang/cosmos/x/swingset/keeper/snapshotter.go +// with the golang SwingStoreExportsHandler in golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go export const ExportManifestFileName = 'export-manifest.json'; // eslint-disable-next-line jsdoc/require-returns-check From ff6cda9b54e71ac0d47aa93cf7cfead009c61309 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 4 Aug 2023 06:23:43 +0000 Subject: [PATCH 070/109] chore(x/swingset): remove SwingStoreExportsHandler from extension_snapshotter.go --- .../swingset/keeper/extension_snapshotter.go | 695 ------------------ .../keeper/extension_snapshotter_test.go | 239 ------ 2 files changed, 934 deletions(-) diff --git a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go 
b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go index 71315d11924..f7902ebb4da 100644 --- a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go @@ -7,17 +7,12 @@ import ( "fmt" "io" "math" - "os" - "path/filepath" - "regexp" - "github.com/Agoric/agoric-sdk/golang/cosmos/vm" "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types" vstoragetypes "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types" "github.com/cosmos/cosmos-sdk/baseapp" snapshots "github.com/cosmos/cosmos-sdk/snapshots/types" sdk "github.com/cosmos/cosmos-sdk/types" - sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/tendermint/tendermint/libs/log" tmproto "github.com/tendermint/tendermint/proto/tendermint/types" ) @@ -33,233 +28,6 @@ var _ SwingStoreExportEventHandler = &ExtensionSnapshotter{} // SnapshotFormat 1 defines all extension payloads to be SwingStoreArtifact proto messages const SnapshotFormat = 1 -// exportManifest represents the content of the JS swing-store export manifest. -// The export is exchanged between Cosmos and JS using the file system, and only -// the directory containing the export is exchanged with a blockingSend. The -// manifest is a JSON file with the agreed upon file name of -// "export-manifest.json" in the export directory. It contains the file names -// for the "export data" (described in the godoc for exportDataFilename), and -// for the opaque artifacts of the export. -type exportManifest struct { - // BlockHeight is the block height of the manifest. - BlockHeight uint64 `json:"blockHeight,omitempty"` - // Data is the filename of the export data. - Data string `json:"data,omitempty"` - // Artifacts is the list of [artifact name, file name] pairs. 
- Artifacts [][2]string `json:"artifacts"` -} - -// ExportManifestFilename is the manifest filename which must be synchronized with the JS export/import tooling -// See packages/cosmic-swingset/src/export-kernel-db.js and packages/cosmic-swingset/src/import-kernel-db.js -const ExportManifestFilename = "export-manifest.json" - -// For restore operations, the swing-store "export data" is exchanged with the -// JS side as a file which encodes "export data" entries as a sequence of -// [key, value] JSON arrays each terminated by a new line. -// NB: this is not technically jsonlines since the entries are new line -// terminated instead of being new line separated, however the parsers in both -// JS and golang handle such extra whitespace. -const exportDataFilename = "export-data.jsonl" - -// UntrustedExportDataArtifactName is a special artifact name that the provider -// and consumer of an export can use to indicate the presence of a synthetic -// artifact containing untrusted "export data". This artifact must not end up in -// the list of artifacts imported by the JS import tooling (which would fail). 
-const UntrustedExportDataArtifactName = "UNTRUSTED-EXPORT-DATA" -const untrustedExportDataFilename = "untrusted-export-data.jsonl" - -const exportedFilesMode = 0644 - -// swingStoreExportActionType is the action type used for all swing-store -// export blockingSend, and synchronized with the JS side in -// packages/internal/src/action-types.js -const swingStoreExportActionType = "SWING_STORE_EXPORT" - -// initiateRequest is the request type for initiating an export -const initiateRequest = "initiate" - -type swingStoreInitiateExportAction struct { - Type string `json:"type"` // "SWING_STORE_EXPORT" - Request string `json:"request"` // "initiate" - BlockHeight uint64 `json:"blockHeight"` // expected blockHeight -} - -// retrieveRequest is the request type for retrieving an initiated export -const retrieveRequest = "retrieve" - -type swingStoreRetrieveExportAction struct { - Type string `json:"type"` // "SWING_STORE_EXPORT" - Request string `json:"request"` // "retrieve" -} -type swingStoreRetrieveResult = string - -// discardRequest is the request type for discarding an initiated but an export -// that was not retrieved -const discardRequest = "discard" - -type swingStoreDiscardExportAction struct { - Type string `json:"type"` // "SWING_STORE_EXPORT" - Request string `json:"request"` // "discard" -} - -// restoreRequest is the request type for restoring an export -const restoreRequest = "restore" - -type swingStoreRestoreExportAction struct { - Type string `json:"type"` // "SWING_STORE_EXPORT" - Request string `json:"request"` // "restore" - BlockHeight uint64 `json:"blockHeight,omitempty"` // empty if deferring blockHeight to the manifest - Args [1]string `json:"args"` // args[1] is the directory in which the export to restore from is located -} - -var disallowedArtifactNameChar = regexp.MustCompile(`[^-_.a-zA-Z0-9]`) - -// sanitizeArtifactName searches a string for all characters -// other than ASCII alphanumerics, hyphens, underscores, and dots, -// and replaces 
each of them with a hyphen. -func sanitizeArtifactName(name string) string { - return disallowedArtifactNameChar.ReplaceAllString(name, "-") -} - -type operationDetails struct { - // isRestore indicates whether the operation in progress is a restore. - // It is assigned at creation and never mutated. - isRestore bool - // blockHeight is the block height of this in-progress operation. - // It is assigned at creation and never mutated. - blockHeight uint64 - // logger is the destination for this operation's log messages. - // It is assigned at creation and never mutated. - logger log.Logger - // exportStartedResult is used to synchronize the commit boundary by the - // component performing the export operation to ensure export determinism. - // unused for restore operations - // It is assigned at creation and never mutated. The started goroutine - // writes into the channel and closes it. The main goroutine reads from the - // channel. - exportStartedResult chan error - // exportRetrieved is an internal flag indicating whether the JS generated - // export was retrieved. It can be false regardless of the component's - // eventHandler reporting an error or not. It is only indicative of whether - // the component called retrieveExport, and used to control whether to send - // a discard request if the JS side stayed responsible for the generated but - // un-retrieved export. - // It is only read or written by the export operation's goroutine. - exportRetrieved bool - // exportDone is a channel that is closed when the active export operation - // is complete. - // It is assigned at creation and never mutated. The started goroutine - // writes into the channel and closes it. The main goroutine reads from the - // channel. - exportDone chan error -} - -// activeOperation is a global variable reflecting a swing-store import or -// export in progress on the JS side. 
-// This variable is only assigned to through calls of the public methods of -// SwingStoreExportsHandler, which rely on the exportDone channel getting -// closed to nil this variable. -// Only the calls to InitiateExport and RestoreExport set this to a non-nil -// value. The goroutine in which these calls occur is referred to as the -// "main goroutine". That goroutine may be different over time, but it's the -// caller's responsibility to ensure those goroutines do not overlap calls to -// the SwingStoreExportsHandler public methods. -// See also the details of each field for the conditions under which they are -// accessed. -var activeOperation *operationDetails - -// WaitUntilSwingStoreExportStarted synchronizes with an export operation in -// progress, if any. -// The JS swing-store export must have started before a new block is committed -// to ensure the content of the export is the one expected. The app must call -// this method before sending a commit action to the JS controller. -// -// Waits for a just initiated export operation to have started in its goroutine. -// If no operation is in progress (InitiateExport hasn't been called or -// already completed), or if we previously checked if the operation had started, -// returns immediately. -// -// Must be called by the main goroutine -func WaitUntilSwingStoreExportStarted() error { - operationDetails := activeOperation - if operationDetails == nil { - return nil - } - // Block until the active operation has started, saving the result. - // The operation's goroutine only produces a value in case of an error, - // and closes the channel once the export has started or failed. - // Only the first call after an export was initiated will report an error. - startErr := <-operationDetails.exportStartedResult - - // Check if the active export operation is done, and if so, nil it out so - // future calls are faster. 
- select { - case <-operationDetails.exportDone: - // If there was a start error, the channel is already closed at this point. - activeOperation = nil - default: - // don't wait for it to finish - // If there is no start error, the operation may take an arbitrary amount - // of time to terminate, likely spanning multiple blocks. However this - // function will only ever observe the expected activeOperation since the - // internal checkNotActive() called immediately on InitiateSnapshot will - // nil-out activeOperation if a stale value was sill sitting around. - } - - return startErr -} - -// WaitUntilSwingStoreExportDone synchronizes with the completion of an export -// operation in progress, if any. -// Only a single swing-store operation may execute at a time. Calling -// InitiateExport or RestoreExport will fail if a swing-store operation is -// already in progress. Furthermore, a component may need to know once an -// export it initiated has completed. Once this method call returns, the -// goroutine is guaranteed to have terminated, and the SwingStoreExportEventHandler -// provided to InitiateExport to no longer be in use. -// -// Reports any error that may have occurred from InitiateExport. -// If no export operation is in progress (InitiateExport hasn't been called or -// already completed), or if we previously checked if an export had completed, -// returns immediately. -// -// Must be called by the main goroutine -func WaitUntilSwingStoreExportDone() error { - operationDetails := activeOperation - if operationDetails == nil { - return nil - } - // Block until the active export has completed. - // The export operation's goroutine only produces a value in case of an error, - // and closes the channel once the export has completed or failed. - // Only the first call after an export was initiated will report an error. 
- exportErr := <-operationDetails.exportDone - activeOperation = nil - - return exportErr -} - -// checkNotActive returns an error if there is an active operation. -// -// Always internally called by the main goroutine -func checkNotActive() error { - operationDetails := activeOperation - if operationDetails != nil { - select { - case <-operationDetails.exportDone: - // nil-out any stale operation - activeOperation = nil - default: - if operationDetails.isRestore { - return fmt.Errorf("restore operation already in progress for height %d", operationDetails.blockHeight) - } else { - return fmt.Errorf("export operation already in progress for height %d", operationDetails.blockHeight) - } - } - } - return nil -} - // snapshotDetails describes an in-progress state-sync snapshot type snapshotDetails struct { // blockHeight is the block height of this in-progress snapshot. @@ -277,54 +45,6 @@ type snapshotDetails struct { payloadWriter snapshots.ExtensionPayloadWriter } -// SwingStoreExportProvider gives access to a SwingStore "export data" and the -// related artifacts. -// A JS swing-store export is composed of optional "export data" (a set of -// key/value pairs), and opaque artifacts (a name and data as bytes) that -// complement the "export data". -// The abstraction is similar to the JS side swing-store export abstraction, -// but without the ability to list artifacts or random access them. -// -// A swing-store export for creating a state-sync snapshot will not contain any -// "export data" since this information is reflected every block into the -// verified cosmos DB. -// On state-sync snapshot restore, the swingset ExtensionSnapshotter will -// synthesize a provider for this module with "export data" sourced from the -// restored cosmos DB, and artifacts from the extension's payloads. When -// importing, the JS swing-store will verify that the artifacts match hashes -// contained in the trusted "export data". 
-type SwingStoreExportProvider struct { - // BlockHeight is the block height of the SwingStore export. - BlockHeight uint64 - // GetExportData is a function to return the "export data" of the SwingStore export, if any. - GetExportData func() ([]*vstoragetypes.DataEntry, error) - // ReadArtifact is a function to return the next unread artifact in the SwingStore export. - // It errors with io.EOF upon reaching the end of the artifact list. - ReadArtifact func() (types.SwingStoreArtifact, error) -} - -// SwingStoreExportEventHandler is used to handle events that occur while generating -// a swing-store export. It is provided to SwingStoreExportsHandler.InitiateExport. -type SwingStoreExportEventHandler interface { - // OnExportStarted is called by InitiateExport in a goroutine after the - // swing-store export has successfully started. - // This is where the component performing the export must initiate its own - // off main goroutine work, which results in retrieving and processing the - // swing-store export. - // - // Must call the retrieveExport function before returning, which will in turn - // synchronously invoke OnExportRetrieved once the swing-store export is ready. - OnExportStarted(blockHeight uint64, retrieveExport func() error) error - // OnExportRetrieved is called when the swing-store export has been retrieved, - // during the retrieveExport invocation. - // The provider is not a return value to retrieveExport in order to - // report errors in components that are unable to propagate errors back to the - // OnExportStarted result, like cosmos state-sync ExtensionSnapshotter. - // The implementation must synchronously consume the provider, which becomes - // invalid after the method returns. - OnExportRetrieved(provider SwingStoreExportProvider) error -} - // ExtensionSnapshotter is the cosmos state-sync extension snapshotter for the // x/swingset module. // It handles the SwingSet state that is not part of the Cosmos DB. 
Currently @@ -395,33 +115,6 @@ func (snapshotter *ExtensionSnapshotter) SupportedFormats() []uint32 { return []uint32{SnapshotFormat} } -// SwingStoreExportsHandler exclusively manages the communication with the JS side -// related to swing-store exports, ensuring insensitivity to sub-block timing, -// and enforcing concurrency requirements. -// The caller of this submodule must arrange block level commit synchronization, -// to ensure the results are deterministic. -// -// Some blockingSend calls performed by this submodule are non-deterministic. -// This submodule will send messages to JS from goroutines at unpredictable -// times, but this is safe because when handling the messages, the JS side -// does not perform operations affecting consensus and ignores state changes -// since committing the previous block. -// Some other blockingSend calls however do change the JS swing-store and -// must happen before the Swingset controller on the JS side was inited, in -// which case the mustNotBeInited parameter will be set to true. -type SwingStoreExportsHandler struct { - logger log.Logger - blockingSend func(action vm.Jsonable, mustNotBeInited bool) (string, error) -} - -// NewSwingStoreExportsHandler creates a SwingStoreExportsHandler -func NewSwingStoreExportsHandler(logger log.Logger, blockingSend func(action vm.Jsonable, mustNotBeInited bool) (string, error)) *SwingStoreExportsHandler { - return &SwingStoreExportsHandler{ - logger: logger.With("module", fmt.Sprintf("x/%s", types.ModuleName), "submodule", "SwingStoreExportsHandler"), - blockingSend: blockingSend, - } -} - // InitiateSnapshot initiates a snapshot for the given block height. // If a snapshot is already in progress, or if no snapshot manager is // configured, this will fail. 
@@ -441,137 +134,6 @@ func (snapshotter *ExtensionSnapshotter) InitiateSnapshot(height int64) error { return snapshotter.swingStoreExportsHandler.InitiateExport(blockHeight, snapshotter) } -// InitiateExport synchronously verifies that there is not already an export or -// import operation in progress and initiates a new export in a goroutine, -// via a dedicated SWING_STORE_EXPORT blockingSend action independent of other -// block related blockingSends, calling the given eventHandler when a related -// blockingSend completes. If the eventHandler doesn't retrieve the export, -// then it sends another blockingSend action to discard it. -// -// eventHandler is invoked solely from the spawned goroutine. -// The "started" and "done" events can be used for synchronization with an -// active operation taking place in the goroutine, by calling respectively the -// WaitUntilSwingStoreExportStarted and WaitUntilSwingStoreExportDone methods -// from the goroutine that initiated the export. -// -// Must be called by the main goroutine -func (exportsHandler SwingStoreExportsHandler) InitiateExport(blockHeight uint64, eventHandler SwingStoreExportEventHandler) error { - err := checkNotActive() - if err != nil { - return err - } - - logger := exportsHandler.logger.With("height", blockHeight) - - // Indicate that an export operation has been initiated by setting the global - // activeOperation var. - // This structure is used to synchronize with the goroutine spawned below. - operationDetails := &operationDetails{ - blockHeight: blockHeight, - logger: logger, - exportStartedResult: make(chan error, 1), - exportRetrieved: false, - exportDone: make(chan error, 1), - } - activeOperation = operationDetails - - go func() { - var err error - var startedErr error - defer func() { - if err == nil { - err = startedErr - } - if err != nil { - operationDetails.exportDone <- err - } - // First, indicate an export is no longer in progress. 
This ensures that - // for an operation with a start error, a call to WaitUntilSwingStoreExportStarted - // waiting on exportStartedResult will always find the operation has - // completed, and clear the active operation instead of racing if the - // channel close order was reversed. - close(operationDetails.exportDone) - // Then signal the current export operation that it failed to start, - // which will be reported to a waiting WaitUntilSwingStoreExportStarted, - // or the next call otherwise. - if startedErr != nil { - operationDetails.exportStartedResult <- startedErr - close(operationDetails.exportStartedResult) - } - }() - - initiateAction := &swingStoreInitiateExportAction{ - Type: swingStoreExportActionType, - BlockHeight: blockHeight, - Request: initiateRequest, - } - - // blockingSend for SWING_STORE_EXPORT action is safe to call from a goroutine - _, startedErr = exportsHandler.blockingSend(initiateAction, false) - - if startedErr != nil { - logger.Error("failed to initiate swing-store export", "err", startedErr) - // The deferred function will communicate the error and close channels - // in the appropriate order. - return - } - - // Signal that the export operation has started successfully in the goroutine. - // Calls to WaitUntilSwingStoreExportStarted will no longer block. 
- close(operationDetails.exportStartedResult) - - // The user provided OnExportStarted function should call retrieveExport() - var retrieveErr error - err = eventHandler.OnExportStarted(blockHeight, func() error { - activeOperationDetails := activeOperation - if activeOperationDetails != operationDetails || operationDetails.exportRetrieved { - // shouldn't happen, but return an error if it does - return errors.New("export operation no longer active") - } - - retrieveErr = exportsHandler.retrieveExport(eventHandler.OnExportRetrieved) - - return retrieveErr - }) - - // Restore any retrieve error swallowed by OnExportStarted - if err == nil { - err = retrieveErr - } - if err != nil { - logger.Error("failed to process swing-store export", "err", err) - } - - // Check whether the JS generated export was retrieved by eventHandler - if operationDetails.exportRetrieved { - return - } - - // Discarding the export so invalidate retrieveExport - operationDetails.exportRetrieved = true - - discardAction := &swingStoreDiscardExportAction{ - Type: swingStoreExportActionType, - Request: discardRequest, - } - _, discardErr := exportsHandler.blockingSend(discardAction, false) - - if discardErr != nil { - logger.Error("failed to discard swing-store export", "err", err) - } - - if err == nil { - err = discardErr - } else if discardErr != nil { - // Safe to wrap error and use detailed error info since this error - // will not go back into swingset layers - err = sdkerrors.Wrapf(err, "failed to discard swing-store export after failing to process export: %+v", discardErr) - } - }() - - return nil -} - // OnExportStarted performs the actual cosmos state-sync app snapshot. // The cosmos implementation will ultimately call SnapshotExtension, which can // retrieve and process the SwingStore artifacts. 
@@ -638,133 +200,6 @@ func (snapshotter *ExtensionSnapshotter) SnapshotExtension(blockHeight uint64, p return snapshotDetails.retrieveExport() } -// retrieveExport retrieves an initiated export then invokes onExportRetrieved -// with the retrieved export. -// -// It performs a SWING_STORE_EXPORT blockingSend which on success returns a -// string of the directory containing the JS swing-store export. It then reads -// the export manifest generated by the JS side, and synthesizes a -// SwingStoreExportProvider for the onExportRetrieved callback to access the -// retrieved swing-store export. -// The export manifest format is described by the exportManifest struct. -// -// After calling onExportRetrieved, the export directory and its contents are -// deleted. -// -// This will block until the export is ready. Internally invoked by the -// InitiateExport logic in the export operation's goroutine. -func (exportsHandler SwingStoreExportsHandler) retrieveExport(onExportRetrieved func(provider SwingStoreExportProvider) error) (err error) { - operationDetails := activeOperation - if operationDetails == nil { - // shouldn't happen, but return an error if it does - return errors.New("no active swing-store export operation") - } - - blockHeight := operationDetails.blockHeight - - action := &swingStoreRetrieveExportAction{ - Type: swingStoreExportActionType, - Request: retrieveRequest, - } - out, err := exportsHandler.blockingSend(action, false) - - if err != nil { - return err - } - operationDetails.exportRetrieved = true - - var exportDir swingStoreRetrieveResult - err = json.Unmarshal([]byte(out), &exportDir) - if err != nil { - return err - } - - defer os.RemoveAll(exportDir) - - rawManifest, err := os.ReadFile(filepath.Join(exportDir, ExportManifestFilename)) - if err != nil { - return err - } - - var manifest exportManifest - err = json.Unmarshal(rawManifest, &manifest) - if err != nil { - return err - } - - if manifest.BlockHeight != blockHeight { - return 
fmt.Errorf("export manifest blockHeight (%d) doesn't match (%d)", manifest.BlockHeight, blockHeight) - } - - getExportData := func() ([]*vstoragetypes.DataEntry, error) { - entries := []*vstoragetypes.DataEntry{} - if manifest.Data == "" { - return entries, nil - } - - dataFile, err := os.Open(filepath.Join(exportDir, manifest.Data)) - if err != nil { - return nil, err - } - defer dataFile.Close() - - decoder := json.NewDecoder(dataFile) - for { - var jsonEntry []string - err = decoder.Decode(&jsonEntry) - if err == io.EOF { - break - } else if err != nil { - return nil, err - } - - if len(jsonEntry) != 2 { - return nil, fmt.Errorf("invalid export data entry (length %d)", len(jsonEntry)) - } - entry := vstoragetypes.DataEntry{Path: jsonEntry[0], Value: jsonEntry[1]} - entries = append(entries, &entry) - } - - return entries, nil - } - - nextArtifact := 0 - - readArtifact := func() (artifact types.SwingStoreArtifact, err error) { - if nextArtifact == len(manifest.Artifacts) { - return artifact, io.EOF - } else if nextArtifact > len(manifest.Artifacts) { - return artifact, fmt.Errorf("exceeded expected artifact count: %d > %d", nextArtifact, len(manifest.Artifacts)) - } - - artifactEntry := manifest.Artifacts[nextArtifact] - nextArtifact++ - - artifactName := artifactEntry[0] - fileName := artifactEntry[1] - if artifactName == UntrustedExportDataArtifactName { - return artifact, fmt.Errorf("unexpected export artifact name %s", artifactName) - } - artifact.Name = artifactName - artifact.Data, err = os.ReadFile(filepath.Join(exportDir, fileName)) - - return artifact, err - } - - err = onExportRetrieved(SwingStoreExportProvider{BlockHeight: manifest.BlockHeight, GetExportData: getExportData, ReadArtifact: readArtifact}) - if err != nil { - return err - } - - // if nextArtifact != len(manifest.Artifacts) { - // return errors.New("not all export artifacts were retrieved") - // } - - operationDetails.logger.Info("retrieved swing-store export", "exportDir", exportDir) - - 
return nil -} - // OnExportRetrieved handles the SwingStore export retrieved by the SwingStoreExportsHandler // and writes it out to the SnapshotExtension's payloadWriter. // This operation is invoked by the SwingStoreExportsHandler in the snapshot @@ -880,133 +315,3 @@ func (snapshotter *ExtensionSnapshotter) RestoreExtension(blockHeight uint64, fo SwingStoreExportProvider{BlockHeight: blockHeight, GetExportData: getExportData, ReadArtifact: readArtifact}, ) } - -// RestoreExport restores the JS swing-store using previously exported data and artifacts. -// -// Must be called by the main goroutine -func (exportsHandler SwingStoreExportsHandler) RestoreExport(provider SwingStoreExportProvider) error { - err := checkNotActive() - if err != nil { - return err - } - - blockHeight := provider.BlockHeight - - // We technically don't need to create an active operation here since both - // InitiateExport and RestoreExport should only be called from the main - // goroutine, but it doesn't cost much to add in case things go wrong. - operationDetails := &operationDetails{ - isRestore: true, - blockHeight: blockHeight, - logger: exportsHandler.logger, - // goroutine synchronization is unnecessary since anything checking should - // be called from the same goroutine. - // Effectively WaitUntilSwingStoreExportStarted would block infinitely and - // exportsHandler.InitiateExport will error when calling checkNotActive. 
- exportStartedResult: nil, - exportDone: nil, - } - activeOperation = operationDetails - defer func() { - activeOperation = nil - }() - - exportDir, err := os.MkdirTemp("", fmt.Sprintf("agd-swing-store-restore-%d-*", blockHeight)) - if err != nil { - return err - } - defer os.RemoveAll(exportDir) - - manifest := exportManifest{ - BlockHeight: blockHeight, - } - - exportDataEntries, err := provider.GetExportData() - if err != nil { - return err - } - - if len(exportDataEntries) > 0 { - manifest.Data = exportDataFilename - exportDataFile, err := os.OpenFile(filepath.Join(exportDir, exportDataFilename), os.O_CREATE|os.O_WRONLY, exportedFilesMode) - if err != nil { - return err - } - defer exportDataFile.Close() - - encoder := json.NewEncoder(exportDataFile) - encoder.SetEscapeHTML(false) - for _, dataEntry := range exportDataEntries { - entry := []string{dataEntry.Path, dataEntry.Value} - err := encoder.Encode(entry) - if err != nil { - return err - } - } - - err = exportDataFile.Sync() - if err != nil { - return err - } - } - - writeExportFile := func(filename string, data []byte) error { - return os.WriteFile(filepath.Join(exportDir, filename), data, exportedFilesMode) - } - - for { - artifact, err := provider.ReadArtifact() - if err == io.EOF { - break - } else if err != nil { - return err - } - - if artifact.Name != UntrustedExportDataArtifactName { - // An artifact is only verifiable by the JS swing-store import using the - // information contained in the "export data". - // Since we cannot trust the source of the artifact at this point, - // including that the artifact's name is genuine, we generate a safe and - // unique filename from the artifact's name we received, by substituting - // any non letters-digits-hyphen-underscore-dot by a hyphen, and - // prefixing with an incremented id. - // The filename is not used for any purpose in the import logic. 
- filename := sanitizeArtifactName(artifact.Name) - filename = fmt.Sprintf("%d-%s", len(manifest.Artifacts), filename) - manifest.Artifacts = append(manifest.Artifacts, [2]string{artifact.Name, filename}) - err = writeExportFile(filename, artifact.Data) - } else { - // Pseudo artifact containing untrusted export data which may have been - // saved separately for debugging purposes (not referenced from the manifest) - err = writeExportFile(untrustedExportDataFilename, artifact.Data) - } - if err != nil { - return err - } - } - - manifestBytes, err := json.MarshalIndent(manifest, "", " ") - if err != nil { - return err - } - err = writeExportFile(ExportManifestFilename, manifestBytes) - if err != nil { - return err - } - - action := &swingStoreRestoreExportAction{ - Type: swingStoreExportActionType, - BlockHeight: blockHeight, - Request: restoreRequest, - Args: [1]string{exportDir}, - } - - _, err = exportsHandler.blockingSend(action, true) - if err != nil { - return err - } - - exportsHandler.logger.Info("restored swing-store export", "exportDir", exportDir, "height", blockHeight) - - return nil -} diff --git a/golang/cosmos/x/swingset/keeper/extension_snapshotter_test.go b/golang/cosmos/x/swingset/keeper/extension_snapshotter_test.go index 884b286e566..85440591c4f 100644 --- a/golang/cosmos/x/swingset/keeper/extension_snapshotter_test.go +++ b/golang/cosmos/x/swingset/keeper/extension_snapshotter_test.go @@ -1,11 +1,9 @@ package keeper import ( - "errors" "io" "testing" - "github.com/Agoric/agoric-sdk/golang/cosmos/vm" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/tendermint/tendermint/libs/log" ) @@ -20,47 +18,6 @@ func newTestExtensionSnapshotter() *ExtensionSnapshotter { } } -func newTestSwingStoreExportsHandler() *SwingStoreExportsHandler { - logger := log.NewNopLogger() // log.NewTMLogger(log.NewSyncWriter( /* os.Stdout*/ io.Discard)).With("module", "sdk/app") - return &SwingStoreExportsHandler{ - logger: logger, - blockingSend: func(action vm.Jsonable, 
mustNotBeInited bool) (string, error) { return "", nil }, - } -} - -var _ SwingStoreExportEventHandler = testSwingStoreEventHandler{} - -type testSwingStoreEventHandler struct { - onExportStarted func(height uint64, retrieveExport func() error) error - onExportRetrieved func(provider SwingStoreExportProvider) error -} - -func newTestSwingStoreEventHandler() testSwingStoreEventHandler { - return testSwingStoreEventHandler{ - onExportStarted: func(height uint64, retrieveExport func() error) error { - return retrieveExport() - }, - onExportRetrieved: func(provider SwingStoreExportProvider) error { - for { - _, err := provider.ReadArtifact() - if err == io.EOF { - return nil - } else if err != nil { - return err - } - } - }, - } -} - -func (taker testSwingStoreEventHandler) OnExportStarted(height uint64, retrieveExport func() error) error { - return taker.onExportStarted(height, retrieveExport) -} - -func (taker testSwingStoreEventHandler) OnExportRetrieved(provider SwingStoreExportProvider) error { - return taker.onExportRetrieved(provider) -} - func TestExtensionSnapshotterInProgress(t *testing.T) { extensionSnapshotter := newTestExtensionSnapshotter() ch := make(chan struct{}) @@ -106,48 +63,6 @@ func TestExtensionSnapshotterInProgress(t *testing.T) { } } -func TestSwingStoreSnapshotterInProgress(t *testing.T) { - exportsHandler := newTestSwingStoreExportsHandler() - ch := make(chan struct{}) - exportEventHandler := newTestSwingStoreEventHandler() - exportEventHandler.onExportStarted = func(height uint64, retrieveExport func() error) error { - <-ch - return nil - } - err := exportsHandler.InitiateExport(123, exportEventHandler) - if err != nil { - t.Fatal(err) - } - err = WaitUntilSwingStoreExportStarted() - if err != nil { - t.Fatal(err) - } - - err = exportsHandler.InitiateExport(456, newTestSwingStoreEventHandler()) - if err == nil { - t.Error("wanted error for export operation in progress") - } - - err = 
exportsHandler.RestoreExport(SwingStoreExportProvider{BlockHeight: 456}) - if err == nil { - t.Error("wanted error for export operation in progress") - } - - close(ch) - err = WaitUntilSwingStoreExportDone() - if err != nil { - t.Fatal(err) - } - err = exportsHandler.InitiateExport(456, exportEventHandler) - if err != nil { - t.Fatal(err) - } - err = WaitUntilSwingStoreExportDone() - if err != nil { - t.Fatal(err) - } -} - func TestExtensionSnapshotterNotConfigured(t *testing.T) { extensionSnapshotter := newTestExtensionSnapshotter() extensionSnapshotter.isConfigured = func() bool { return false } @@ -189,157 +104,3 @@ func TestExtensionSnapshotterSecondCommit(t *testing.T) { t.Fatal(err) } } - -func TestSwingStoreSnapshotterSecondCommit(t *testing.T) { - exportsHandler := newTestSwingStoreExportsHandler() - - exportEventHandler := newTestSwingStoreEventHandler() - // Use a channel to block the snapshot goroutine after it has started but before it exits. - ch := make(chan struct{}) - exportEventHandler.onExportStarted = func(height uint64, retrieveExport func() error) error { - <-ch - return nil - } - - // First run through app.Commit() - err := WaitUntilSwingStoreExportStarted() - if err != nil { - t.Fatal(err) - } - err = exportsHandler.InitiateExport(123, exportEventHandler) - if err != nil { - t.Fatal(err) - } - - // Second run through app.Commit() - should return right away - err = WaitUntilSwingStoreExportStarted() - if err != nil { - t.Fatal(err) - } - - // close the signaling channel to let goroutine exit - close(ch) - err = WaitUntilSwingStoreExportDone() - if err != nil { - t.Fatal(err) - } -} - -func TestSwingStoreSnapshotterInitiateFails(t *testing.T) { - exportsHandler := newTestSwingStoreExportsHandler() - exportEventHandler := newTestSwingStoreEventHandler() - exportsHandler.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) (string, error) { - initiateAction, ok := action.(*swingStoreInitiateExportAction) - if ok && initiateAction.Request 
== "initiate" { - return "", errors.New("initiate failed") - } - return "", nil - } - - err := exportsHandler.InitiateExport(123, exportEventHandler) - if err != nil { - t.Fatal(err) - } - err = WaitUntilSwingStoreExportStarted() - if err == nil { - t.Fatal("wanted initiation error") - } - if err.Error() != "initiate failed" { - t.Errorf(`wanted error "initiate failed", got "%s"`, err.Error()) - } - // another wait should succeed without error - err = WaitUntilSwingStoreExportStarted() - if err != nil { - t.Error(err) - } - err = WaitUntilSwingStoreExportDone() - if err != nil { - t.Fatal(err) - } -} - -func TestSwingStoreSnapshotterRetrievalFails(t *testing.T) { - exportsHandler := newTestSwingStoreExportsHandler() - var retrieveError error - exportsHandler.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) (string, error) { - retrieveAction, ok := action.(*swingStoreRetrieveExportAction) - if ok && retrieveAction.Request == "retrieve" { - retrieveError = errors.New("retrieve failed") - return "", retrieveError - } - return "", nil - } - exportEventHandler := newTestSwingStoreEventHandler() - var savedErr error - ch := make(chan struct{}) - exportEventHandler.onExportStarted = func(height uint64, retrieveExport func() error) error { - savedErr = retrieveExport() - <-ch - return savedErr - } - - err := exportsHandler.InitiateExport(123, exportEventHandler) - if err != nil { - t.Fatal(err) - } - err = WaitUntilSwingStoreExportStarted() - if err != nil { - t.Fatal(err) - } - - close(ch) - if savedErr != retrieveError { - t.Errorf(`wanted retrieval error, got "%v"`, savedErr) - } - err = WaitUntilSwingStoreExportDone() - if err != retrieveError { - t.Errorf(`wanted retrieval error, got "%v"`, err) - } -} - -func TestSwingStoreSnapshotterDiscard(t *testing.T) { - discardCalled := false - exportsHandler := newTestSwingStoreExportsHandler() - exportsHandler.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) (string, error) { - discardAction, ok := 
action.(*swingStoreDiscardExportAction) - if ok && discardAction.Request == "discard" { - discardCalled = true - } - return "", nil - } - - // simulate an onExportStarted which successfully calls retrieveExport() - exportEventHandler := newTestSwingStoreEventHandler() - exportEventHandler.onExportStarted = func(height uint64, retrieveExport func() error) error { - activeOperation.exportRetrieved = true - return nil - } - err := exportsHandler.InitiateExport(123, exportEventHandler) - if err != nil { - t.Fatal(err) - } - err = WaitUntilSwingStoreExportDone() - if err != nil { - t.Fatal(err) - } - if discardCalled { - t.Error("didn't want discard called") - } - - // simulate an onExportStarted which doesn't call retrieveExport() - exportEventHandler = newTestSwingStoreEventHandler() - exportEventHandler.onExportStarted = func(height uint64, retrieveExport func() error) error { - return nil - } - err = exportsHandler.InitiateExport(456, exportEventHandler) - if err != nil { - t.Fatal(err) - } - err = WaitUntilSwingStoreExportDone() - if err != nil { - t.Fatal(err) - } - if !discardCalled { - t.Error("wanted discard called") - } -} From 5ec5ad8cd8b0b9e1601de6d108107197e9f9a525 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Tue, 18 Jul 2023 17:21:58 +0000 Subject: [PATCH 071/109] refactor(x/swingset): split snapshotter --- docs/architecture/state-sync.md | 110 ++-- golang/cosmos/app/app.go | 23 +- golang/cosmos/x/swingset/alias.go | 13 +- .../cosmos/x/swingset/keeper/snapshotter.go | 564 ++++++++++++++---- .../x/swingset/keeper/snapshotter_test.go | 209 +++++-- 5 files changed, 694 insertions(+), 225 deletions(-) diff --git a/docs/architecture/state-sync.md b/docs/architecture/state-sync.md index 9c214fe2d8f..3f7b263ea5b 100644 --- a/docs/architecture/state-sync.md +++ b/docs/architecture/state-sync.md @@ -9,9 +9,11 @@ sequenceDiagram participant A-M as App participant MS-M as MultiStore participant SSES-M as SwingSet ExtensionSnapshotter + participant SSEH-M as 
SwingStoreExportsHandler end box whitesmoke App snapshot goroutine + participant SSEH-AS as SwingStoreExportsHandler participant SSES-AS as SwingSet ExtensionSnapshotter participant A-AS as App participant SM-AS as Snapshot manager @@ -21,6 +23,7 @@ sequenceDiagram participant SM-CS as Snapshot manager participant MS-CS as MultiStore participant SSES-CS as SwingSet ExtensionSnapshotter + participant SSEH-CS as SwingStoreExportsHandler participant D-CS as Disk end @@ -36,8 +39,8 @@ sequenceDiagram end TM->>+A-M: Commit - A-M->>+SSES-M: WaitUntilSwingStoreExportStarted() - SSES-M-->>-A-M: + A-M->>+SSEH-M: WaitUntilSwingStoreExportStarted() + SSEH-M-->>-A-M: A-M->>+CM: COMMIT_BLOCK CM->>CM: swingStore.commit() CM-->>-A-M: @@ -64,8 +67,8 @@ sequenceDiagram A-M-->>-TM: TM->>+A-M: Commit - A-M->>+SSES-M: WaitUntilSwingStoreExportStarted() - SSES-M-->>-A-M: + A-M->>+SSEH-M: WaitUntilSwingStoreExportStarted() + SSEH-M-->>-A-M: A-M->>+CM: COMMIT_BLOCK CM->>CM: swingStore.commit() CM-->>-A-M: @@ -74,25 +77,28 @@ sequenceDiagram CM-->>-A-M: A-M->>A-M: isSnapshotHeight: true A-M->>+SSES-M: InitiateSnapshot() - SSES-M->>SSES-M: checkNotActive() - SSES-M->>SSES-M: activeOperation = operationDetails{} - SSES-M-)+SSES-AS: go + SSES-M->>+SSEH-M: InitiateExport() + SSEH-M->>SSEH-M: checkNotActive() + SSEH-M->>SSEH-M: activeOperation = operationDetails{} + SSEH-M-)+SSEH-AS: go + SSEH-M-->>-SSES-M: SSES-M-->>-A-M: A-M-->>-TM: par App Snapshot - SSES-AS->>+CM: SWING_STORE_EXPORT/initiate + SSEH-AS->>+CM: SWING_STORE_EXPORT/initiate CM->>+D: MkDir(exportDir) D-->>-CM: CM-)+SSE: initiateSwingStoreExport(exportDir) CM->>CM: await started
(blocking) - CM-->>-SSES-AS: + CM-->>-SSEH-AS: alt not initiated - SSES-AS-)SSES-M: exportStartedResult <- err
close(exportStartedResult) - SSES-AS-)SSES-M: exportDone <- err + SSEH-AS-)SSEH-M: exportStartedResult <- err
close(exportStartedResult) + SSEH-AS-)SSEH-M: exportDone <- err else initiated - SSES-AS-)SSES-M: close(exportStartedResult) + SSEH-AS-)SSEH-M: close(exportStartedResult) alt retrieval + SSEH-AS->>+SSES-AS: OnExportStarted() SSES-AS->>+A-AS: BaseApp.Snapshot() A-AS->>+SM-AS: Create() SM-AS-)+SM-CS: go createSnapshot() @@ -104,40 +110,47 @@ sequenceDiagram end MS-CS-->>-SM-CS: SM-CS->>+SSES-CS: SnapshotExtension() - SSES-CS->>+CM: SWING_STORE_EXPORT/retrieve + SSES-CS->>+SSEH-CS: retrieveExport() + SSEH-CS->>+CM: SWING_STORE_EXPORT/retrieve CM->>CM: await done
(blocking) - CM-->>-SSES-CS: exportDir - SSES-CS->>+D-CS: Read(export-manifest.json) - D-CS-->>-SSES-CS: + CM-->>-SSEH-CS: exportDir + SSEH-CS->>+D-CS: Read(export-manifest.json) + D-CS-->>-SSEH-CS: + SSEH-CS->>+SSES-CS: OnExportRetrieved() loop - SSES-CS->>+D-CS: Read(artifactFile) - D-CS-->>-SSES-CS: - SSES-CS->>+SM-CS: payloadWriter(artifact{name, data}) + SSES-CS->>+SSEH-CS: provider.ReadArtifact() + SSEH-CS->>+D-CS: Read(artifactFile) + D-CS-->>-SSEH-CS: + SSEH-CS-->>-SSES-CS: artifact{name, data} + SSES-CS->>+SM-CS: payloadWriter(artifact) SM-CS-)SM-AS: chunks <- chunk SM-CS-->>-SSES-CS: end - SSES-CS->>+D-CS: Delete(exportDir) - D-CS-->>-SSES-CS: + SSES-CS-->>-SSEH-CS: + SSEH-CS->>+D-CS: Delete(exportDir) + D-CS-->>-SSEH-CS: + SSEH-CS-->>-SSES-CS: SSES-CS-->>-SM-CS: SM-CS-)-SM-AS: close(chunks) SM-AS->>SM-AS: Save() SM-AS-->>-A-AS: A-AS-->>-SSES-AS: + SSES-AS-->>-SSEH-AS: else no retrieval - SSES-AS->>+A-AS: BaseApp.Snapshot() - A-AS-->>-SSES-AS: - SSES-AS->>+CM: SWING_STORE_EXPORT/discard + SSEH-AS->>+SSES-AS: OnExportStarted() + SSES-AS-->>-SSEH-AS: + SSEH-AS->>+CM: SWING_STORE_EXPORT/discard CM-)SSE: Stop() SSE-)CM: done::reject() CM->>CM: await done CM->>+D: Delete(exportDir) D-->-CM: - CM-->>-SSES-AS: - SSES-AS-)SSES-M: exportDone <- err + CM-->>-SSEH-AS: + SSEH-AS-)SSEH-M: exportDone <- err end end - SSES-AS-)SSES-M: close(exportDone) - deactivate SSES-AS + SSEH-AS-)SSEH-M: close(exportDone) + deactivate SSEH-AS end par JS SwingStore export @@ -169,9 +182,9 @@ sequenceDiagram Note over TM, A-M: BeginBlock, EndBlock TM->>+A-M: Commit - A-M->>+SSES-M: WaitUntilSwingStoreExportStarted() - SSES-M->>SSES-M: err = <-exportStartedResult
(blocking) - SSES-M-->>-A-M: + A-M->>+SSEH-M: WaitUntilSwingStoreExportStarted() + SSEH-M->>SSEH-M: err = <-exportStartedResult
(blocking) + SSEH-M-->>-A-M: A-M->>+CM: COMMIT_BLOCK CM->>CM: await started
(blocking) CM->>CM: swingStore.commit() @@ -197,6 +210,7 @@ sequenceDiagram participant SM-CS as Snapshot manager participant MS-CS as MultiStore participant SSES-CS as SwingSet ExtensionSnapshotter + participant SSEH-CS as SwingStoreExportsHandler participant D-CS as Disk end @@ -227,28 +241,33 @@ sequenceDiagram opt loop over extensions SM-CS->>+SSES-CS: RestoreExtension() - SSES-CS->>SSES-CS: checkNotActive() - SSES-CS->>SSES-CS: activeOperation = operationDetails{} - SSES-CS->>+D-CS: MkDir(exportDir) - D-CS-->>-SSES-CS: + SSES-CS->>+SSEH-CS: RestoreExport() + SSEH-CS->>SSEH-CS: checkNotActive() + SSEH-CS->>SSEH-CS: activeOperation = operationDetails{} + SSEH-CS->>+D-CS: MkDir(exportDir) + D-CS-->>-SSEH-CS: + SSEH-CS->>+SSES-CS: provider.GetExportData() SSES-CS->>+MS-CS: ExportStorageFromPrefix
("swingStore.") MS-CS-->>-SSES-CS: vstorage data entries + SSES-CS-->>-SSEH-CS: loop each data entry - SSES-CS->>+D-CS: Append(export-data.jsonl,
"JSON(entry tuple)\n") - D-CS-->>-SSES-CS: + SSEH-CS->>+D-CS: Append(export-data.jsonl,
"JSON(entry tuple)\n") + D-CS-->>-SSEH-CS: end loop extension snapshot items + SSEH-CS->>+SSES-CS: provider.readArtifact() SSES-CS->>+SM-CS: payloadReader() SM-CS->>+SM-M: chunk = <-chunks SM-M-->>-SM-CS: SM-CS-->>-SSES-CS: extension payloadBytes SSES-CS->>SSES-CS: artifact = parse(payloadBytes) - SSES-CS->>+D-CS: Write(sanitizedFilename, artifact.data) - D-CS-->>-SSES-CS: + SSES-CS->>-SSEH-CS: artifact + SSEH-CS->>+D-CS: Write(sanitizedFilename, artifact.data) + D-CS-->>-SSEH-CS: end - SSES-CS->>+D-CS: Write(export-manifest.jsonl, manifest) - D-CS-->>-SSES-CS: - SSES-CS->>+CM: SWING_STORE_EXPORT/restore + SSEH-CS->>+D-CS: Write(export-manifest.jsonl, manifest) + D-CS-->>-SSEH-CS: + SSEH-CS->>+CM: SWING_STORE_EXPORT/restore CM->>+SSI: performStateSyncImport() SSI->>+D: Read(export-manifest.json) D-->>-SSI: @@ -274,9 +293,10 @@ sequenceDiagram SSI->>+SS: set(host.blockHeight) SS-->>-SSI: SSI-->>-CM: - CM-->>-SSES-CS: - SSES-CS->>+D-CS: Delete(exportDir) - D-CS-->>-SSES-CS: + CM-->>-SSEH-CS: + SSEH-CS->>+D-CS: Delete(exportDir) + D-CS-->>-SSEH-CS: + SSEH-CS-->>-SSES-CS: SSES-CS-->>-SM-CS: end SM-CS-)-SM-M: chRestoreDone <- restoreDone{}
close(chRestoreDone) diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index 3fae93e8d1a..44d0d7bda39 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -233,12 +233,13 @@ type GaiaApp struct { // nolint: golint FeeGrantKeeper feegrantkeeper.Keeper AuthzKeeper authzkeeper.Keeper - SwingSetKeeper swingset.Keeper - SwingSetSnapshotter swingset.Snapshotter - VstorageKeeper vstorage.Keeper - VibcKeeper vibc.Keeper - VbankKeeper vbank.Keeper - LienKeeper lien.Keeper + SwingStoreExportsHandler swingset.SwingStoreExportsHandler + SwingSetSnapshotter swingset.ExtensionSnapshotter + SwingSetKeeper swingset.Keeper + VstorageKeeper vstorage.Keeper + VibcKeeper vibc.Keeper + VbankKeeper vbank.Keeper + LienKeeper lien.Keeper // make scoped keepers public for test purposes ScopedIBCKeeper capabilitykeeper.ScopedKeeper @@ -457,9 +458,8 @@ func NewAgoricApp( callToController, ) - app.SwingSetSnapshotter = swingsetkeeper.NewSwingsetSnapshotter( - bApp, - app.SwingSetKeeper.ExportSwingStore, + app.SwingStoreExportsHandler = *swingsetkeeper.NewSwingStoreExportsHandler( + app.Logger(), func(action vm.Jsonable, mustNotBeInited bool) (string, error) { if mustNotBeInited { app.CheckControllerInited(false) @@ -472,6 +472,11 @@ func NewAgoricApp( return sendToController(true, string(bz)) }, ) + app.SwingSetSnapshotter = *swingsetkeeper.NewExtensionSnapshotter( + bApp, + &app.SwingStoreExportsHandler, + app.SwingSetKeeper.ExportSwingStore, + ) app.VibcKeeper = vibc.NewKeeper( appCodec, keys[vibc.StoreKey], diff --git a/golang/cosmos/x/swingset/alias.go b/golang/cosmos/x/swingset/alias.go index 9c4b80c96e8..117a284a1c6 100644 --- a/golang/cosmos/x/swingset/alias.go +++ b/golang/cosmos/x/swingset/alias.go @@ -21,10 +21,11 @@ var ( ) type ( - Keeper = keeper.Keeper - Snapshotter = keeper.SwingsetSnapshotter - Egress = types.Egress - MsgDeliverInbound = types.MsgDeliverInbound - MsgProvision = types.MsgProvision - Params = types.Params + Keeper = 
keeper.Keeper + SwingStoreExportsHandler = keeper.SwingStoreExportsHandler + ExtensionSnapshotter = keeper.ExtensionSnapshotter + Egress = types.Egress + MsgDeliverInbound = types.MsgDeliverInbound + MsgProvision = types.MsgProvision + Params = types.Params ) diff --git a/golang/cosmos/x/swingset/keeper/snapshotter.go b/golang/cosmos/x/swingset/keeper/snapshotter.go index ce785c147b6..71315d11924 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter.go @@ -1,6 +1,7 @@ package keeper import ( + "bytes" "encoding/json" "errors" "fmt" @@ -26,7 +27,8 @@ import ( // See docs/architecture/state-sync.md for a sequence diagram of how this // module fits within the state-sync process. -var _ snapshots.ExtensionSnapshotter = &SwingsetSnapshotter{} +var _ snapshots.ExtensionSnapshotter = &ExtensionSnapshotter{} +var _ SwingStoreExportEventHandler = &ExtensionSnapshotter{} // SnapshotFormat 1 defines all extension payloads to be SwingStoreArtifact proto messages const SnapshotFormat = 1 @@ -59,10 +61,10 @@ const ExportManifestFilename = "export-manifest.json" // JS and golang handle such extra whitespace. const exportDataFilename = "export-data.jsonl" -// UntrustedExportDataArtifactName is a special artifact name to indicate the -// presence of a synthetic artifact containing untrusted "export data". This -// artifact must not end up in the list of artifacts imported by the JS import -// tooling (which would fail). +// UntrustedExportDataArtifactName is a special artifact name that the provider +// and consumer of an export can use to indicate the presence of a synthetic +// artifact containing untrusted "export data". This artifact must not end up in +// the list of artifacts imported by the JS import tooling (which would fail). const UntrustedExportDataArtifactName = "UNTRUSTED-EXPORT-DATA" const untrustedExportDataFilename = "untrusted-export-data.jsonl" @@ -137,14 +139,13 @@ type operationDetails struct { // channel. 
exportStartedResult chan error // exportRetrieved is an internal flag indicating whether the JS generated - // the "retrieve" blockingSend was performed or not, and used to control - // whether to send a "discard" request if the JS side stayed responsible for - // the generated but un-retrieved export. + // export was retrieved. It can be false regardless of the component's + // eventHandler reporting an error or not. It is only indicative of whether + // the component called retrieveExport, and used to control whether to send + // a discard request if the JS side stayed responsible for the generated but + // un-retrieved export. // It is only read or written by the export operation's goroutine. exportRetrieved bool - // Internal plumbing of any error that happen during `SnapshotExtension` - // Only read or written by the snapshot worker goroutine. - retrieveError error // exportDone is a channel that is closed when the active export operation // is complete. // It is assigned at creation and never mutated. The started goroutine @@ -156,13 +157,13 @@ type operationDetails struct { // activeOperation is a global variable reflecting a swing-store import or // export in progress on the JS side. // This variable is only assigned to through calls of the public methods of -// SwingsetSnapshotter, which rely on the exportDone channel getting +// SwingStoreExportsHandler, which rely on the exportDone channel getting // closed to nil this variable. -// Only the calls to InitiateSnapshot and RestoreSnapshot set this to a non-nil +// Only the calls to InitiateExport and RestoreExport set this to a non-nil // value. The goroutine in which these calls occur is referred to as the // "main goroutine". That goroutine may be different over time, but it's the // caller's responsibility to ensure those goroutines do not overlap calls to -// the SwingsetSnapshotter public methods. +// the SwingStoreExportsHandler public methods. 
// See also the details of each field for the conditions under which they are // accessed. var activeOperation *operationDetails @@ -174,7 +175,7 @@ var activeOperation *operationDetails // this method before sending a commit action to the JS controller. // // Waits for a just initiated export operation to have started in its goroutine. -// If no operation is in progress (InitiateSnapshot hasn't been called or +// If no operation is in progress (InitiateExport hasn't been called or // already completed), or if we previously checked if the operation had started, // returns immediately. // @@ -211,13 +212,14 @@ func WaitUntilSwingStoreExportStarted() error { // WaitUntilSwingStoreExportDone synchronizes with the completion of an export // operation in progress, if any. // Only a single swing-store operation may execute at a time. Calling -// InitiateSnapshot or RestoreSnapshot will fail if a swing-store operation is +// InitiateExport or RestoreExport will fail if a swing-store operation is // already in progress. Furthermore, a component may need to know once an // export it initiated has completed. Once this method call returns, the -// goroutine is guaranteed to have terminated. +// goroutine is guaranteed to have terminated, and the SwingStoreExportEventHandler +// provided to InitiateExport to no longer be in use. // -// Reports any error that may have occurred from InitiateSnapshot. -// If no export operation is in progress (InitiateSnapshot hasn't been called or +// Reports any error that may have occurred from InitiateExport. +// If no export operation is in progress (InitiateExport hasn't been called or // already completed), or if we previously checked if an export had completed, // returns immediately. 
// @@ -258,48 +260,123 @@ func checkNotActive() error { return nil } -type SwingsetSnapshotter struct { - isConfigured func() bool - takeSnapshot func(height int64) - newRestoreContext func(height int64) sdk.Context - logger log.Logger - getSwingStoreExportData func(ctx sdk.Context) []*vstoragetypes.DataEntry - blockingSend func(action vm.Jsonable, mustNotBeInited bool) (string, error) +// snapshotDetails describes an in-progress state-sync snapshot +type snapshotDetails struct { + // blockHeight is the block height of this in-progress snapshot. + blockHeight uint64 + // logger is the destination for this snapshot's log messages. + logger log.Logger + // retrieveExport is the callback provided by the SwingStoreExportsHandler to + // retrieve the SwingStore's export provider which allows to read the export's + // artifacts used to populate this state-sync extension's payloads. + retrieveExport func() error + // payloadWriter is the callback provided by the state-sync snapshot manager + // for an extension to write a payload into the under-construction snapshot + // stream. It may be called multiple times, and often is (currently once per + // SwingStore export artifact). + payloadWriter snapshots.ExtensionPayloadWriter } -// NewSwingsetSnapshotter creates a SwingsetSnapshotter which exclusively -// manages communication with the JS side for Swingset snapshots, ensuring -// insensitivity to sub-block timing, and enforcing concurrency requirements. -// The caller of this submodule must arrange block level commit synchronization, -// to ensure the results are deterministic. +// SwingStoreExportProvider gives access to a SwingStore "export data" and the +// related artifacts. +// A JS swing-store export is composed of optional "export data" (a set of +// key/value pairs), and opaque artifacts (a name and data as bytes) that +// complement the "export data". 
+// The abstraction is similar to the JS side swing-store export abstraction, +// but without the ability to list artifacts or random access them. // -// Some `blockingSend` calls performed by this submodule are non-deterministic. -// This submodule will send messages to JS from goroutines at unpredictable -// times, but this is safe because when handling the messages, the JS side -// does not perform operations affecting consensus and ignores state changes -// since committing the previous block. -// Some other `blockingSend` calls however do change the JS swing-store and -// must happen before the Swingset controller on the JS side was inited. -func NewSwingsetSnapshotter( +// A swing-store export for creating a state-sync snapshot will not contain any +// "export data" since this information is reflected every block into the +// verified cosmos DB. +// On state-sync snapshot restore, the swingset ExtensionSnapshotter will +// synthesize a provider for this module with "export data" sourced from the +// restored cosmos DB, and artifacts from the extension's payloads. When +// importing, the JS swing-store will verify that the artifacts match hashes +// contained in the trusted "export data". +type SwingStoreExportProvider struct { + // BlockHeight is the block height of the SwingStore export. + BlockHeight uint64 + // GetExportData is a function to return the "export data" of the SwingStore export, if any. + GetExportData func() ([]*vstoragetypes.DataEntry, error) + // ReadArtifact is a function to return the next unread artifact in the SwingStore export. + // It errors with io.EOF upon reaching the end of the artifact list. + ReadArtifact func() (types.SwingStoreArtifact, error) +} + +// SwingStoreExportEventHandler is used to handle events that occur while generating +// a swing-store export. It is provided to SwingStoreExportsHandler.InitiateExport. 
+type SwingStoreExportEventHandler interface { + // OnExportStarted is called by InitiateExport in a goroutine after the + // swing-store export has successfully started. + // This is where the component performing the export must initiate its own + // off main goroutine work, which results in retrieving and processing the + // swing-store export. + // + // Must call the retrieveExport function before returning, which will in turn + // synchronously invoke OnExportRetrieved once the swing-store export is ready. + OnExportStarted(blockHeight uint64, retrieveExport func() error) error + // OnExportRetrieved is called when the swing-store export has been retrieved, + // during the retrieveExport invocation. + // The provider is not a return value to retrieveExport in order to + // report errors in components that are unable to propagate errors back to the + // OnExportStarted result, like cosmos state-sync ExtensionSnapshotter. + // The implementation must synchronously consume the provider, which becomes + // invalid after the method returns. + OnExportRetrieved(provider SwingStoreExportProvider) error +} + +// ExtensionSnapshotter is the cosmos state-sync extension snapshotter for the +// x/swingset module. +// It handles the SwingSet state that is not part of the Cosmos DB. Currently +// that state is solely composed of the SwingStore artifacts, as a copy of the +// SwingStore "export data" is streamed into the cosmos DB during execution. +// When performing a snapshot, the extension leverages the SwingStoreExportsHandler +// to retrieve the needed SwingStore artifacts. When restoring a snapshot, +// the extension combines the artifacts from the state-sync snapshot with the +// SwingStore "export data" from the already restored cosmos DB, to produce a +// full SwingStore export that can be imported to create a new JS swing-store DB. 
+// +// Since swing-store is not able to open its DB at historical commit points, +// the export operation must start before new changes are committed, aka before +// Swingset is instructed to commit the next block. For that reason the cosmos +// snapshot operation is currently mediated by the SwingStoreExportsHandler, +// which helps with the synchronization needed to generate consistent exports, +// while allowing SwingSet activity to proceed for the next block. This relies +// on the application calling WaitUntilSwingStoreExportStarted before +// instructing SwingSet to commit a new block. +type ExtensionSnapshotter struct { + isConfigured func() bool + // takeAppSnapshot is called by OnExportStarted when creating a snapshot + takeAppSnapshot func(height int64) + newRestoreContext func(height int64) sdk.Context + swingStoreExportsHandler *SwingStoreExportsHandler + getSwingStoreExportDataShadowCopy func(ctx sdk.Context) []*vstoragetypes.DataEntry + logger log.Logger + activeSnapshot *snapshotDetails +} + +// NewExtensionSnapshotter creates a new swingset ExtensionSnapshotter +func NewExtensionSnapshotter( app *baseapp.BaseApp, - getSwingStoreExportData func(ctx sdk.Context) []*vstoragetypes.DataEntry, - blockingSend func(action vm.Jsonable, mustNotBeInited bool) (string, error), -) SwingsetSnapshotter { - return SwingsetSnapshotter{ - isConfigured: func() bool { return app.SnapshotManager() != nil }, - takeSnapshot: app.Snapshot, + swingStoreExportsHandler *SwingStoreExportsHandler, + getSwingStoreExportDataShadowCopy func(ctx sdk.Context) []*vstoragetypes.DataEntry, +) *ExtensionSnapshotter { + return &ExtensionSnapshotter{ + isConfigured: func() bool { return app.SnapshotManager() != nil }, + takeAppSnapshot: app.Snapshot, newRestoreContext: func(height int64) sdk.Context { return app.NewUncachedContext(false, tmproto.Header{Height: height}) }, - logger: app.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName), "submodule", "snapshotter"), - 
getSwingStoreExportData: getSwingStoreExportData, - blockingSend: blockingSend, + logger: app.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName), "submodule", "extension snapshotter"), + swingStoreExportsHandler: swingStoreExportsHandler, + getSwingStoreExportDataShadowCopy: getSwingStoreExportDataShadowCopy, + activeSnapshot: nil, } } // SnapshotName returns the name of the snapshotter, it should be unique in the manager. // Implements ExtensionSnapshotter -func (snapshotter *SwingsetSnapshotter) SnapshotName() string { +func (snapshotter *ExtensionSnapshotter) SnapshotName() string { return types.ModuleName } @@ -307,27 +384,53 @@ func (snapshotter *SwingsetSnapshotter) SnapshotName() string { // extension payloads when creating a snapshot. It's independent of the format // used for the overall state-sync snapshot. // Implements ExtensionSnapshotter -func (snapshotter *SwingsetSnapshotter) SnapshotFormat() uint32 { +func (snapshotter *ExtensionSnapshotter) SnapshotFormat() uint32 { return SnapshotFormat } // SupportedFormats returns a list of extension specific payload formats it can // restore from. // Implements ExtensionSnapshotter -func (snapshotter *SwingsetSnapshotter) SupportedFormats() []uint32 { +func (snapshotter *ExtensionSnapshotter) SupportedFormats() []uint32 { return []uint32{SnapshotFormat} } +// SwingStoreExportsHandler exclusively manages the communication with the JS side +// related to swing-store exports, ensuring insensitivity to sub-block timing, +// and enforcing concurrency requirements. +// The caller of this submodule must arrange block level commit synchronization, +// to ensure the results are deterministic. +// +// Some blockingSend calls performed by this submodule are non-deterministic. 
+// This submodule will send messages to JS from goroutines at unpredictable +// times, but this is safe because when handling the messages, the JS side +// does not perform operations affecting consensus and ignores state changes +// since committing the previous block. +// Some other blockingSend calls however do change the JS swing-store and +// must happen before the Swingset controller on the JS side was inited, in +// which case the mustNotBeInited parameter will be set to true. +type SwingStoreExportsHandler struct { + logger log.Logger + blockingSend func(action vm.Jsonable, mustNotBeInited bool) (string, error) +} + +// NewSwingStoreExportsHandler creates a SwingStoreExportsHandler +func NewSwingStoreExportsHandler(logger log.Logger, blockingSend func(action vm.Jsonable, mustNotBeInited bool) (string, error)) *SwingStoreExportsHandler { + return &SwingStoreExportsHandler{ + logger: logger.With("module", fmt.Sprintf("x/%s", types.ModuleName), "submodule", "SwingStoreExportsHandler"), + blockingSend: blockingSend, + } +} + // InitiateSnapshot initiates a snapshot for the given block height. // If a snapshot is already in progress, or if no snapshot manager is // configured, this will fail. // // The snapshot operation is performed in a goroutine. // Use WaitUntilSwingStoreExportStarted to synchronize commit boundaries. 
-func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { - err := checkNotActive() - if err != nil { - return err +func (snapshotter *ExtensionSnapshotter) InitiateSnapshot(height int64) error { + if !snapshotter.isConfigured() { + return fmt.Errorf("snapshot manager not configured") } if height <= 0 { return fmt.Errorf("block height must not be negative or 0") @@ -335,11 +438,30 @@ func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { blockHeight := uint64(height) - if !snapshotter.isConfigured() { - return fmt.Errorf("snapshot manager not configured") + return snapshotter.swingStoreExportsHandler.InitiateExport(blockHeight, snapshotter) +} + +// InitiateExport synchronously verifies that there is not already an export or +// import operation in progress and initiates a new export in a goroutine, +// via a dedicated SWING_STORE_EXPORT blockingSend action independent of other +// block related blockingSends, calling the given eventHandler when a related +// blockingSend completes. If the eventHandler doesn't retrieve the export, +// then it sends another blockingSend action to discard it. +// +// eventHandler is invoked solely from the spawned goroutine. +// The "started" and "done" events can be used for synchronization with an +// active operation taking place in the goroutine, by calling respectively the +// WaitUntilSwingStoreExportStarted and WaitUntilSwingStoreExportDone methods +// from the goroutine that initiated the export. +// +// Must be called by the main goroutine +func (exportsHandler SwingStoreExportsHandler) InitiateExport(blockHeight uint64, eventHandler SwingStoreExportEventHandler) error { + err := checkNotActive() + if err != nil { + return err } - logger := snapshotter.logger.With("height", blockHeight) + logger := exportsHandler.logger.With("height", blockHeight) // Indicate that an export operation has been initiated by setting the global // activeOperation var. 
@@ -385,7 +507,7 @@ func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { } // blockingSend for SWING_STORE_EXPORT action is safe to call from a goroutine - _, startedErr = snapshotter.blockingSend(initiateAction, false) + _, startedErr = exportsHandler.blockingSend(initiateAction, false) if startedErr != nil { logger.Error("failed to initiate swing-store export", "err", startedErr) @@ -398,25 +520,41 @@ func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { // Calls to WaitUntilSwingStoreExportStarted will no longer block. close(operationDetails.exportStartedResult) - // In production this should indirectly call SnapshotExtension(). - snapshotter.takeSnapshot(height) + // The user provided OnExportStarted function should call retrieveExport() + var retrieveErr error + err = eventHandler.OnExportStarted(blockHeight, func() error { + activeOperationDetails := activeOperation + if activeOperationDetails != operationDetails || operationDetails.exportRetrieved { + // shouldn't happen, but return an error if it does + return errors.New("export operation no longer active") + } + + retrieveErr = exportsHandler.retrieveExport(eventHandler.OnExportRetrieved) - // Restore any retrieve error swallowed by `takeSnapshot` - err = activeOperation.retrieveError + return retrieveErr + }) + + // Restore any retrieve error swallowed by OnExportStarted + if err == nil { + err = retrieveErr + } if err != nil { logger.Error("failed to process swing-store export", "err", err) } - // Check whether the JS generated export was retrieved by SnapshotExtension + // Check whether the JS generated export was retrieved by eventHandler if operationDetails.exportRetrieved { return } + // Discarding the export so invalidate retrieveExport + operationDetails.exportRetrieved = true + discardAction := &swingStoreDiscardExportAction{ Type: swingStoreExportActionType, Request: discardRequest, } - _, discardErr := snapshotter.blockingSend(discardAction, 
false) + _, discardErr := exportsHandler.blockingSend(discardAction, false) if discardErr != nil { logger.Error("failed to discard swing-store export", "err", err) @@ -434,45 +572,101 @@ func (snapshotter *SwingsetSnapshotter) InitiateSnapshot(height int64) error { return nil } +// OnExportStarted performs the actual cosmos state-sync app snapshot. +// The cosmos implementation will ultimately call SnapshotExtension, which can +// retrieve and process the SwingStore artifacts. +// This method is invoked by the SwingStoreExportsHandler in a goroutine +// started by InitiateExport, only if no other SwingStore export operation is +// already in progress. +// +// Implements SwingStoreExportEventHandler +func (snapshotter *ExtensionSnapshotter) OnExportStarted(blockHeight uint64, retrieveExport func() error) error { + logger := snapshotter.logger.With("height", blockHeight) + + if blockHeight > math.MaxInt64 { + return fmt.Errorf("snapshot block height %d is higher than max int64", blockHeight) + } + height := int64(blockHeight) + + // We assume SwingStoreSnapshotter correctly guarded against concurrent snapshots + snapshotDetails := snapshotDetails{ + blockHeight: blockHeight, + logger: logger, + retrieveExport: retrieveExport, + } + snapshotter.activeSnapshot = &snapshotDetails + + snapshotter.takeAppSnapshot(height) + + snapshotter.activeSnapshot = nil + + // Unfortunately Cosmos BaseApp.Snapshot() does not report its errors. + return nil +} + // SnapshotExtension is the method invoked by cosmos to write extension payloads // into the underlying protobuf stream of the state-sync snapshot. // This method is invoked by the cosmos snapshot manager in a goroutine it -// started during the call to takeAppSnapshot. However the snapshot manager -// fully synchronizes its goroutine with the goroutine started by this -// SwingsetSnapshotter. +// started during the call to OnExportStarted. 
However the snapshot manager +// fully synchronizes its goroutine with the goroutine started by the +// SwingStoreSnapshotter, making it safe to invoke callbacks of the +// SwingStoreSnapshotter. SnapshotExtension actually delegates writing +// extension payloads to OnExportRetrieved. // // Implements ExtensionSnapshotter -func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, payloadWriter snapshots.ExtensionPayloadWriter) (err error) { - defer func() { - // Since the cosmos layers do a poor job of reporting errors, do our own reporting - // `err` will be set correctly regardless if it was explicitly assigned or - // a value was provided to a `return` statement. - // See https://go.dev/blog/defer-panic-and-recover for details - if err != nil { - operationDetails := activeOperation - if operationDetails != nil { - operationDetails.retrieveError = err - } else { - snapshotter.logger.Error("swingset snapshot extension failed", "err", err) - } - } - }() +func (snapshotter *ExtensionSnapshotter) SnapshotExtension(blockHeight uint64, payloadWriter snapshots.ExtensionPayloadWriter) error { + logError := func(err error) error { + // The cosmos layers do a poor job of reporting errors, however + // SwingStoreExportsHandler arranges to report retrieve errors swallowed by + // takeAppSnapshot, so we manually report unexpected errors. 
+ snapshotter.logger.Error("swingset snapshot extension failed", "err", err) + return err + } + + snapshotDetails := snapshotter.activeSnapshot + if snapshotDetails == nil { + // shouldn't happen, but return an error if it does + return logError(errors.New("no active swingset snapshot")) + } + + if snapshotDetails.blockHeight != blockHeight { + return logError(fmt.Errorf("swingset extension snapshot requested for unexpected height %d (expected %d)", blockHeight, snapshotDetails.blockHeight)) + } + + snapshotDetails.payloadWriter = payloadWriter + return snapshotDetails.retrieveExport() +} + +// retrieveExport retrieves an initiated export then invokes onExportRetrieved +// with the retrieved export. +// +// It performs a SWING_STORE_EXPORT blockingSend which on success returns a +// string of the directory containing the JS swing-store export. It then reads +// the export manifest generated by the JS side, and synthesizes a +// SwingStoreExportProvider for the onExportRetrieved callback to access the +// retrieved swing-store export. +// The export manifest format is described by the exportManifest struct. +// +// After calling onExportRetrieved, the export directory and its contents are +// deleted. +// +// This will block until the export is ready. Internally invoked by the +// InitiateExport logic in the export operation's goroutine. 
+func (exportsHandler SwingStoreExportsHandler) retrieveExport(onExportRetrieved func(provider SwingStoreExportProvider) error) (err error) { operationDetails := activeOperation if operationDetails == nil { // shouldn't happen, but return an error if it does return errors.New("no active swing-store export operation") } - if operationDetails.blockHeight != blockHeight { - return fmt.Errorf("swingset extension snapshot requested for unexpected height %d (expected %d)", blockHeight, operationDetails.blockHeight) - } + blockHeight := operationDetails.blockHeight action := &swingStoreRetrieveExportAction{ Type: swingStoreExportActionType, Request: retrieveRequest, } - out, err := snapshotter.blockingSend(action, false) + out, err := exportsHandler.blockingSend(action, false) if err != nil { return err @@ -502,20 +696,99 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, pa return fmt.Errorf("export manifest blockHeight (%d) doesn't match (%d)", manifest.BlockHeight, blockHeight) } - writeFileToPayload := func(fileName string, artifactName string) error { - artifact := types.SwingStoreArtifact{Name: artifactName} + getExportData := func() ([]*vstoragetypes.DataEntry, error) { + entries := []*vstoragetypes.DataEntry{} + if manifest.Data == "" { + return entries, nil + } - artifact.Data, err = os.ReadFile(filepath.Join(exportDir, fileName)) + dataFile, err := os.Open(filepath.Join(exportDir, manifest.Data)) if err != nil { - return err + return nil, err } + defer dataFile.Close() + + decoder := json.NewDecoder(dataFile) + for { + var jsonEntry []string + err = decoder.Decode(&jsonEntry) + if err == io.EOF { + break + } else if err != nil { + return nil, err + } + + if len(jsonEntry) != 2 { + return nil, fmt.Errorf("invalid export data entry (length %d)", len(jsonEntry)) + } + entry := vstoragetypes.DataEntry{Path: jsonEntry[0], Value: jsonEntry[1]} + entries = append(entries, &entry) + } + + return entries, nil + } + + nextArtifact := 0 + + 
readArtifact := func() (artifact types.SwingStoreArtifact, err error) { + if nextArtifact == len(manifest.Artifacts) { + return artifact, io.EOF + } else if nextArtifact > len(manifest.Artifacts) { + return artifact, fmt.Errorf("exceeded expected artifact count: %d > %d", nextArtifact, len(manifest.Artifacts)) + } + + artifactEntry := manifest.Artifacts[nextArtifact] + nextArtifact++ + + artifactName := artifactEntry[0] + fileName := artifactEntry[1] + if artifactName == UntrustedExportDataArtifactName { + return artifact, fmt.Errorf("unexpected export artifact name %s", artifactName) + } + artifact.Name = artifactName + artifact.Data, err = os.ReadFile(filepath.Join(exportDir, fileName)) + + return artifact, err + } + + err = onExportRetrieved(SwingStoreExportProvider{BlockHeight: manifest.BlockHeight, GetExportData: getExportData, ReadArtifact: readArtifact}) + if err != nil { + return err + } + + // if nextArtifact != len(manifest.Artifacts) { + // return errors.New("not all export artifacts were retrieved") + // } + + operationDetails.logger.Info("retrieved swing-store export", "exportDir", exportDir) + return nil +} + +// OnExportRetrieved handles the SwingStore export retrieved by the SwingStoreExportsHandler +// and writes it out to the SnapshotExtension's payloadWriter. +// This operation is invoked by the SwingStoreExportsHandler in the snapshot +// manager goroutine synchronized with SwingStoreExportsHandler's own goroutine. 
+// +// Implements SwingStoreExportEventHandler +func (snapshotter *ExtensionSnapshotter) OnExportRetrieved(provider SwingStoreExportProvider) error { + snapshotDetails := snapshotter.activeSnapshot + if snapshotDetails == nil || snapshotDetails.payloadWriter == nil { + // shouldn't happen, but return an error if it does + return errors.New("no active swingset snapshot") + } + + if snapshotDetails.blockHeight != provider.BlockHeight { + return fmt.Errorf("SwingStore export received for unexpected block height %d (app snapshot height is %d)", provider.BlockHeight, snapshotDetails.blockHeight) + } + + writeArtifactToPayload := func(artifact types.SwingStoreArtifact) error { payloadBytes, err := artifact.Marshal() if err != nil { return err } - err = payloadWriter(payloadBytes) + err = snapshotDetails.payloadWriter(payloadBytes) if err != nil { return err } @@ -523,34 +796,57 @@ func (snapshotter *SwingsetSnapshotter) SnapshotExtension(blockHeight uint64, pa return nil } - for _, artifactInfo := range manifest.Artifacts { - artifactName := artifactInfo[0] - fileName := artifactInfo[1] - if artifactName == UntrustedExportDataArtifactName { - return fmt.Errorf("unexpected artifact name %s", artifactName) + for { + artifact, err := provider.ReadArtifact() + if err == io.EOF { + break + } else if err != nil { + return err } - err = writeFileToPayload(fileName, artifactName) + + err = writeArtifactToPayload(artifact) if err != nil { return err } } - if manifest.Data != "" { - err = writeFileToPayload(manifest.Data, UntrustedExportDataArtifactName) + swingStoreExportDataEntries, err := provider.GetExportData() + if err != nil { + return err + } + if len(swingStoreExportDataEntries) == 0 { + return nil + } + + // For debugging, write out any retrieved export data as a single untrusted artifact + // which has the same encoding as the internal SwingStore export data representation: + // a sequence of [key, value] JSON arrays each terminated by a new line. 
+ exportDataArtifact := types.SwingStoreArtifact{Name: UntrustedExportDataArtifactName} + + var encodedExportData bytes.Buffer + encoder := json.NewEncoder(&encodedExportData) + encoder.SetEscapeHTML(false) + for _, dataEntry := range swingStoreExportDataEntries { + entry := []string{dataEntry.Path, dataEntry.Value} + err := encoder.Encode(entry) if err != nil { return err } } + exportDataArtifact.Data = encodedExportData.Bytes() - operationDetails.logger.Info("retrieved swing-store export", "exportDir", exportDir) - + err = writeArtifactToPayload(exportDataArtifact) + encodedExportData.Reset() + if err != nil { + return err + } return nil } // RestoreExtension restores an extension state snapshot, // the payload reader returns io.EOF when it reaches the extension boundaries. // Implements ExtensionSnapshotter -func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, format uint32, payloadReader snapshots.ExtensionPayloadReader) error { +func (snapshotter *ExtensionSnapshotter) RestoreExtension(blockHeight uint64, format uint32, payloadReader snapshots.ExtensionPayloadReader) error { if format != SnapshotFormat { return snapshots.ErrUnknownFormat } @@ -560,18 +856,49 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for } height := int64(blockHeight) + // Retrieve the SwingStore "ExportData" from the verified vstorage data. + // At this point the content of the cosmos DB has been verified against the + // AppHash, which means the SwingStore data it contains can be used as the + // trusted root against which to validate the artifacts. 
+ getExportData := func() ([]*vstoragetypes.DataEntry, error) { + ctx := snapshotter.newRestoreContext(height) + exportData := snapshotter.getSwingStoreExportDataShadowCopy(ctx) + return exportData, nil + } + + readArtifact := func() (artifact types.SwingStoreArtifact, err error) { + payloadBytes, err := payloadReader() + if err != nil { + return artifact, err + } + + err = artifact.Unmarshal(payloadBytes) + return artifact, err + } + + return snapshotter.swingStoreExportsHandler.RestoreExport( + SwingStoreExportProvider{BlockHeight: blockHeight, GetExportData: getExportData, ReadArtifact: readArtifact}, + ) +} + +// RestoreExport restores the JS swing-store using previously exported data and artifacts. +// +// Must be called by the main goroutine +func (exportsHandler SwingStoreExportsHandler) RestoreExport(provider SwingStoreExportProvider) error { err := checkNotActive() if err != nil { return err } - // We technically don't need to create an active snapshot here since both - // `InitiateSnapshot` and `RestoreExtension` should only be called from the - // main thread, but it doesn't cost much to add in case things go wrong. + blockHeight := provider.BlockHeight + + // We technically don't need to create an active operation here since both + // InitiateExport and RestoreExport should only be called from the main + // goroutine, but it doesn't cost much to add in case things go wrong. operationDetails := &operationDetails{ isRestore: true, blockHeight: blockHeight, - logger: snapshotter.logger, + logger: exportsHandler.logger, // goroutine synchronization is unnecessary since anything checking should // be called from the same goroutine. 
// Effectively WaitUntilSwingStoreExportStarted would block infinitely and @@ -584,7 +911,7 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for activeOperation = nil }() - exportDir, err := os.MkdirTemp("", fmt.Sprintf("agd-state-sync-restore-%d-*", blockHeight)) + exportDir, err := os.MkdirTemp("", fmt.Sprintf("agd-swing-store-restore-%d-*", blockHeight)) if err != nil { return err } @@ -594,12 +921,10 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for BlockHeight: blockHeight, } - // Retrieve the SwingStore "ExportData" from the verified vstorage data. - // At this point the content of the cosmos DB has been verified against the - // AppHash, which means the SwingStore data it contains can be used as the - // trusted root against which to validate the artifacts. - ctx := snapshotter.newRestoreContext(height) - exportDataEntries := snapshotter.getSwingStoreExportData(ctx) + exportDataEntries, err := provider.GetExportData() + if err != nil { + return err + } if len(exportDataEntries) > 0 { manifest.Data = exportDataFilename @@ -630,18 +955,13 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for } for { - payloadBytes, err := payloadReader() + artifact, err := provider.ReadArtifact() if err == io.EOF { break } else if err != nil { return err } - artifact := types.SwingStoreArtifact{} - if err = artifact.Unmarshal(payloadBytes); err != nil { - return err - } - if artifact.Name != UntrustedExportDataArtifactName { // An artifact is only verifiable by the JS swing-store import using the // information contained in the "export data". 
@@ -681,12 +1001,12 @@ func (snapshotter *SwingsetSnapshotter) RestoreExtension(blockHeight uint64, for Args: [1]string{exportDir}, } - _, err = snapshotter.blockingSend(action, true) + _, err = exportsHandler.blockingSend(action, true) if err != nil { return err } - snapshotter.logger.Info("restored snapshot", "exportDir", exportDir, "height", blockHeight) + exportsHandler.logger.Info("restored swing-store export", "exportDir", exportDir, "height", blockHeight) return nil } diff --git a/golang/cosmos/x/swingset/keeper/snapshotter_test.go b/golang/cosmos/x/swingset/keeper/snapshotter_test.go index f4f6dd64015..884b286e566 100644 --- a/golang/cosmos/x/swingset/keeper/snapshotter_test.go +++ b/golang/cosmos/x/swingset/keeper/snapshotter_test.go @@ -10,24 +10,64 @@ import ( "github.com/tendermint/tendermint/libs/log" ) -func newTestSnapshotter() SwingsetSnapshotter { +func newTestExtensionSnapshotter() *ExtensionSnapshotter { logger := log.NewNopLogger() // log.NewTMLogger(log.NewSyncWriter( /* os.Stdout*/ io.Discard)).With("module", "sdk/app") - return SwingsetSnapshotter{ - isConfigured: func() bool { return true }, - takeSnapshot: func(height int64) {}, - newRestoreContext: func(height int64) sdk.Context { return sdk.Context{} }, - logger: logger, - blockingSend: func(action vm.Jsonable, mustNotBeInited bool) (string, error) { return "", nil }, + return &ExtensionSnapshotter{ + isConfigured: func() bool { return true }, + newRestoreContext: func(height int64) sdk.Context { return sdk.Context{} }, + logger: logger, + swingStoreExportsHandler: newTestSwingStoreExportsHandler(), } } -func TestSnapshotInProgress(t *testing.T) { - swingsetSnapshotter := newTestSnapshotter() +func newTestSwingStoreExportsHandler() *SwingStoreExportsHandler { + logger := log.NewNopLogger() // log.NewTMLogger(log.NewSyncWriter( /* os.Stdout*/ io.Discard)).With("module", "sdk/app") + return &SwingStoreExportsHandler{ + logger: logger, + blockingSend: func(action vm.Jsonable, mustNotBeInited 
bool) (string, error) { return "", nil }, + } +} + +var _ SwingStoreExportEventHandler = testSwingStoreEventHandler{} + +type testSwingStoreEventHandler struct { + onExportStarted func(height uint64, retrieveExport func() error) error + onExportRetrieved func(provider SwingStoreExportProvider) error +} + +func newTestSwingStoreEventHandler() testSwingStoreEventHandler { + return testSwingStoreEventHandler{ + onExportStarted: func(height uint64, retrieveExport func() error) error { + return retrieveExport() + }, + onExportRetrieved: func(provider SwingStoreExportProvider) error { + for { + _, err := provider.ReadArtifact() + if err == io.EOF { + return nil + } else if err != nil { + return err + } + } + }, + } +} + +func (taker testSwingStoreEventHandler) OnExportStarted(height uint64, retrieveExport func() error) error { + return taker.onExportStarted(height, retrieveExport) +} + +func (taker testSwingStoreEventHandler) OnExportRetrieved(provider SwingStoreExportProvider) error { + return taker.onExportRetrieved(provider) +} + +func TestExtensionSnapshotterInProgress(t *testing.T) { + extensionSnapshotter := newTestExtensionSnapshotter() ch := make(chan struct{}) - swingsetSnapshotter.takeSnapshot = func(height int64) { + extensionSnapshotter.takeAppSnapshot = func(height int64) { <-ch } - err := swingsetSnapshotter.InitiateSnapshot(123) + err := extensionSnapshotter.InitiateSnapshot(123) if err != nil { t.Fatal(err) } @@ -36,12 +76,12 @@ func TestSnapshotInProgress(t *testing.T) { t.Fatal(err) } - err = swingsetSnapshotter.InitiateSnapshot(456) + err = extensionSnapshotter.InitiateSnapshot(456) if err == nil { t.Error("wanted error for snapshot in progress") } - err = swingsetSnapshotter.RestoreExtension( + err = extensionSnapshotter.RestoreExtension( 456, SnapshotFormat, func() ([]byte, error) { return nil, io.EOF @@ -56,7 +96,7 @@ func TestSnapshotInProgress(t *testing.T) { t.Fatal(err) } - err = swingsetSnapshotter.InitiateSnapshot(456) + err = 
extensionSnapshotter.InitiateSnapshot(456) if err != nil { t.Fatal(err) } @@ -66,21 +106,63 @@ func TestSnapshotInProgress(t *testing.T) { } } -func TestNotConfigured(t *testing.T) { - swingsetSnapshotter := newTestSnapshotter() - swingsetSnapshotter.isConfigured = func() bool { return false } - err := swingsetSnapshotter.InitiateSnapshot(123) +func TestSwingStoreSnapshotterInProgress(t *testing.T) { + exportsHandler := newTestSwingStoreExportsHandler() + ch := make(chan struct{}) + exportEventHandler := newTestSwingStoreEventHandler() + exportEventHandler.onExportStarted = func(height uint64, retrieveExport func() error) error { + <-ch + return nil + } + err := exportsHandler.InitiateExport(123, exportEventHandler) + if err != nil { + t.Fatal(err) + } + err = WaitUntilSwingStoreExportStarted() + if err != nil { + t.Fatal(err) + } + + err = exportsHandler.InitiateExport(456, newTestSwingStoreEventHandler()) + if err == nil { + t.Error("wanted error for export operation in progress") + } + + err = exportsHandler.RestoreExport(SwingStoreExportProvider{BlockHeight: 456}) + if err == nil { + t.Error("wanted error for export operation in progress") + } + + close(ch) + err = WaitUntilSwingStoreExportDone() + if err != nil { + t.Fatal(err) + } + err = exportsHandler.InitiateExport(456, exportEventHandler) + if err != nil { + t.Fatal(err) + } + err = WaitUntilSwingStoreExportDone() + if err != nil { + t.Fatal(err) + } +} + +func TestExtensionSnapshotterNotConfigured(t *testing.T) { + extensionSnapshotter := newTestExtensionSnapshotter() + extensionSnapshotter.isConfigured = func() bool { return false } + err := extensionSnapshotter.InitiateSnapshot(123) if err == nil { t.Error("wanted error for unconfigured snapshot manager") } } -func TestSecondCommit(t *testing.T) { - swingsetSnapshotter := newTestSnapshotter() +func TestExtensionSnapshotterSecondCommit(t *testing.T) { + extensionSnapshotter := newTestExtensionSnapshotter() // Use a channel to block the snapshot 
goroutine after it has started but before it exits. ch := make(chan struct{}) - swingsetSnapshotter.takeSnapshot = func(height int64) { + extensionSnapshotter.takeAppSnapshot = func(height int64) { <-ch } @@ -89,7 +171,7 @@ func TestSecondCommit(t *testing.T) { if err != nil { t.Fatal(err) } - err = swingsetSnapshotter.InitiateSnapshot(123) + err = extensionSnapshotter.InitiateSnapshot(123) if err != nil { t.Fatal(err) } @@ -108,9 +190,45 @@ func TestSecondCommit(t *testing.T) { } } -func TestInitiateFails(t *testing.T) { - swingsetSnapshotter := newTestSnapshotter() - swingsetSnapshotter.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) (string, error) { +func TestSwingStoreSnapshotterSecondCommit(t *testing.T) { + exportsHandler := newTestSwingStoreExportsHandler() + + exportEventHandler := newTestSwingStoreEventHandler() + // Use a channel to block the snapshot goroutine after it has started but before it exits. + ch := make(chan struct{}) + exportEventHandler.onExportStarted = func(height uint64, retrieveExport func() error) error { + <-ch + return nil + } + + // First run through app.Commit() + err := WaitUntilSwingStoreExportStarted() + if err != nil { + t.Fatal(err) + } + err = exportsHandler.InitiateExport(123, exportEventHandler) + if err != nil { + t.Fatal(err) + } + + // Second run through app.Commit() - should return right away + err = WaitUntilSwingStoreExportStarted() + if err != nil { + t.Fatal(err) + } + + // close the signaling channel to let goroutine exit + close(ch) + err = WaitUntilSwingStoreExportDone() + if err != nil { + t.Fatal(err) + } +} + +func TestSwingStoreSnapshotterInitiateFails(t *testing.T) { + exportsHandler := newTestSwingStoreExportsHandler() + exportEventHandler := newTestSwingStoreEventHandler() + exportsHandler.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) (string, error) { initiateAction, ok := action.(*swingStoreInitiateExportAction) if ok && initiateAction.Request == "initiate" { return "", 
errors.New("initiate failed") @@ -118,7 +236,7 @@ func TestInitiateFails(t *testing.T) { return "", nil } - err := swingsetSnapshotter.InitiateSnapshot(123) + err := exportsHandler.InitiateExport(123, exportEventHandler) if err != nil { t.Fatal(err) } @@ -140,10 +258,10 @@ func TestInitiateFails(t *testing.T) { } } -func TestRetrievalFails(t *testing.T) { - swingsetSnapshotter := newTestSnapshotter() +func TestSwingStoreSnapshotterRetrievalFails(t *testing.T) { + exportsHandler := newTestSwingStoreExportsHandler() var retrieveError error - swingsetSnapshotter.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) (string, error) { + exportsHandler.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) (string, error) { retrieveAction, ok := action.(*swingStoreRetrieveExportAction) if ok && retrieveAction.Request == "retrieve" { retrieveError = errors.New("retrieve failed") @@ -151,16 +269,16 @@ func TestRetrievalFails(t *testing.T) { } return "", nil } - nilWriter := func(_ []byte) error { return nil } + exportEventHandler := newTestSwingStoreEventHandler() var savedErr error ch := make(chan struct{}) - swingsetSnapshotter.takeSnapshot = func(height int64) { - // shortcut to the snapshot manager calling the extension - savedErr = swingsetSnapshotter.SnapshotExtension(uint64(height), nilWriter) + exportEventHandler.onExportStarted = func(height uint64, retrieveExport func() error) error { + savedErr = retrieveExport() <-ch + return savedErr } - err := swingsetSnapshotter.InitiateSnapshot(123) + err := exportsHandler.InitiateExport(123, exportEventHandler) if err != nil { t.Fatal(err) } @@ -179,10 +297,10 @@ func TestRetrievalFails(t *testing.T) { } } -func TestDiscard(t *testing.T) { +func TestSwingStoreSnapshotterDiscard(t *testing.T) { discardCalled := false - swingsetSnapshotter := newTestSnapshotter() - swingsetSnapshotter.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) (string, error) { + exportsHandler := 
newTestSwingStoreExportsHandler() + exportsHandler.blockingSend = func(action vm.Jsonable, mustNotBeInited bool) (string, error) { discardAction, ok := action.(*swingStoreDiscardExportAction) if ok && discardAction.Request == "discard" { discardCalled = true @@ -190,11 +308,13 @@ func TestDiscard(t *testing.T) { return "", nil } - // simulate a normal Snapshot() call which calls SnapshotExtension() - swingsetSnapshotter.takeSnapshot = func(height int64) { + // simulate an onExportStarted which successfully calls retrieveExport() + exportEventHandler := newTestSwingStoreEventHandler() + exportEventHandler.onExportStarted = func(height uint64, retrieveExport func() error) error { activeOperation.exportRetrieved = true + return nil } - err := swingsetSnapshotter.InitiateSnapshot(123) + err := exportsHandler.InitiateExport(123, exportEventHandler) if err != nil { t.Fatal(err) } @@ -206,9 +326,12 @@ func TestDiscard(t *testing.T) { t.Error("didn't want discard called") } - // simulate a Snapshot() call which doesn't call SnapshotExtension() - swingsetSnapshotter.takeSnapshot = func(height int64) {} - err = swingsetSnapshotter.InitiateSnapshot(456) + // simulate an onExportStarted which doesn't call retrieveExport() + exportEventHandler = newTestSwingStoreEventHandler() + exportEventHandler.onExportStarted = func(height uint64, retrieveExport func() error) error { + return nil + } + err = exportsHandler.InitiateExport(456, exportEventHandler) if err != nil { t.Fatal(err) } From 0c0e74227d34d49ac7ce76ce8e92715816d5ea6a Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Mon, 24 Jul 2023 20:39:34 +0000 Subject: [PATCH 072/109] feat(x/swingset): allow taking snapshot latest height --- .../keeper/swing_store_exports_handler.go | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go index 45ca40b31d3..963bbb0a81b 100644 --- 
a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go +++ b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go @@ -123,9 +123,9 @@ const swingStoreExportActionType = "SWING_STORE_EXPORT" const initiateRequest = "initiate" type swingStoreInitiateExportAction struct { - Type string `json:"type"` // "SWING_STORE_EXPORT" - Request string `json:"request"` // "initiate" - BlockHeight uint64 `json:"blockHeight"` // expected blockHeight + Type string `json:"type"` // "SWING_STORE_EXPORT" + Request string `json:"request"` // "initiate" + BlockHeight uint64 `json:"blockHeight,omitempty"` // empty if no blockHeight requested (latest) } // retrieveRequest is the request type for retrieving an initiated export @@ -399,7 +399,12 @@ func (exportsHandler SwingStoreExportsHandler) InitiateExport(blockHeight uint64 return err } - logger := exportsHandler.logger.With("height", blockHeight) + var logger log.Logger + if blockHeight != 0 { + logger = exportsHandler.logger.With("height", blockHeight) + } else { + logger = exportsHandler.logger.With("height", "latest") + } // Indicate that an export operation has been initiated by setting the global // activeOperation var. 
@@ -564,7 +569,7 @@ func (exportsHandler SwingStoreExportsHandler) retrieveExport(onExportRetrieved return err } - if manifest.BlockHeight != blockHeight { + if blockHeight != 0 && manifest.BlockHeight != blockHeight { return fmt.Errorf("export manifest blockHeight (%d) doesn't match (%d)", manifest.BlockHeight, blockHeight) } From 3613b04a32cdcc39ece2545e1d54efaff6097b8c Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Wed, 12 Jul 2023 23:10:47 +0000 Subject: [PATCH 073/109] chore(cosmic-swingset): thread snapshot options through --- .../swingset/keeper/extension_snapshotter.go | 6 +- .../keeper/swing_store_exports_handler.go | 65 ++++++++++++++++--- .../swing_store_exports_handler_test.go | 18 ++--- packages/cosmic-swingset/src/chain-main.js | 47 +++++++++++--- .../cosmic-swingset/src/export-kernel-db.js | 19 ++++++ .../cosmic-swingset/src/import-kernel-db.js | 18 +++++ 6 files changed, 143 insertions(+), 30 deletions(-) diff --git a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go index f7902ebb4da..e6f5e28d666 100644 --- a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go @@ -131,7 +131,10 @@ func (snapshotter *ExtensionSnapshotter) InitiateSnapshot(height int64) error { blockHeight := uint64(height) - return snapshotter.swingStoreExportsHandler.InitiateExport(blockHeight, snapshotter) + return snapshotter.swingStoreExportsHandler.InitiateExport(blockHeight, snapshotter, SwingStoreExportOptions{ + ExportMode: SwingStoreExportModeCurrent, + IncludeExportData: false, + }) } // OnExportStarted performs the actual cosmos state-sync app snapshot. 
@@ -313,5 +316,6 @@ func (snapshotter *ExtensionSnapshotter) RestoreExtension(blockHeight uint64, fo return snapshotter.swingStoreExportsHandler.RestoreExport( SwingStoreExportProvider{BlockHeight: blockHeight, GetExportData: getExportData, ReadArtifact: readArtifact}, + SwingStoreRestoreOptions{IncludeHistorical: false}, ) } diff --git a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go index 963bbb0a81b..a01bcf35ff3 100644 --- a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go +++ b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go @@ -123,9 +123,10 @@ const swingStoreExportActionType = "SWING_STORE_EXPORT" const initiateRequest = "initiate" type swingStoreInitiateExportAction struct { - Type string `json:"type"` // "SWING_STORE_EXPORT" - Request string `json:"request"` // "initiate" - BlockHeight uint64 `json:"blockHeight,omitempty"` // empty if no blockHeight requested (latest) + Type string `json:"type"` // "SWING_STORE_EXPORT" + Request string `json:"request"` // "initiate" + BlockHeight uint64 `json:"blockHeight,omitempty"` // empty if no blockHeight requested (latest) + Args [1]SwingStoreExportOptions `json:"args"` } // retrieveRequest is the request type for retrieving an initiated export @@ -150,10 +151,50 @@ type swingStoreDiscardExportAction struct { const restoreRequest = "restore" type swingStoreRestoreExportAction struct { - Type string `json:"type"` // "SWING_STORE_EXPORT" - Request string `json:"request"` // "restore" - BlockHeight uint64 `json:"blockHeight,omitempty"` // empty if deferring blockHeight to the manifest - Args [1]string `json:"args"` // args[1] is the directory in which the export to restore from is located + Type string `json:"type"` // "SWING_STORE_EXPORT" + Request string `json:"request"` // "restore" + BlockHeight uint64 `json:"blockHeight,omitempty"` // empty if deferring blockHeight to the manifest + Args 
[1]swingStoreImportOptions `json:"args"` +} + +// SwingStoreExportModeCurrent represents the minimal set of artifacts needed +// to operate a node. +const SwingStoreExportModeCurrent = "current" + +// SwingStoreExportModeArchival represents the set of all artifacts needed to +// not lose any historical state. +const SwingStoreExportModeArchival = "archival" + +// SwingStoreExportModeDebug represents the maximal set of artifacts available +// in the JS swing-store, including any kept around for debugging purposed only +// (like previous XS heap snapshots) +const SwingStoreExportModeDebug = "debug" + +// SwingStoreExportOptions are configurable options provided to the JS swing-store export +type SwingStoreExportOptions struct { + // The export mode can be "current", "archival" or "debug" (SwingStoreExportMode* const) + // See packages/cosmic-swingset/src/export-kernel-db.js initiateSwingStoreExport and + // packages/swing-store/src/swingStore.js makeSwingStoreExporter + ExportMode string `json:"exportMode,omitempty"` + // A flag indicating whether "export data" should be part of the swing-store export + // If false, the resulting SwingStoreExportProvider's GetExportData will + // return an empty list of "export data" entries. + IncludeExportData bool `json:"includeExportData,omitempty"` +} + +// SwingStoreRestoreOptions are configurable options provided to the JS swing-store import +type SwingStoreRestoreOptions struct { + // A flag indicating whether the swing-store import should attempt to load + // all historical artifacts available from the export provider + IncludeHistorical bool `json:"includeHistorical,omitempty"` +} + +type swingStoreImportOptions struct { + // ExportDir is the directory created by RestoreExport that JS swing-store + // should import from. 
+ ExportDir string `json:"exportDir"` + // IncludeHistorical is a copy of SwingStoreRestoreOptions.IncludeHistorical + IncludeHistorical bool `json:"includeHistorical,omitempty"` } var disallowedArtifactNameChar = regexp.MustCompile(`[^-_.a-zA-Z0-9]`) @@ -393,7 +434,7 @@ func NewSwingStoreExportsHandler(logger log.Logger, blockingSend func(action vm. // from the goroutine that initiated the export. // // Must be called by the main goroutine -func (exportsHandler SwingStoreExportsHandler) InitiateExport(blockHeight uint64, eventHandler SwingStoreExportEventHandler) error { +func (exportsHandler SwingStoreExportsHandler) InitiateExport(blockHeight uint64, eventHandler SwingStoreExportEventHandler, exportOptions SwingStoreExportOptions) error { err := checkNotActive() if err != nil { return err @@ -447,6 +488,7 @@ func (exportsHandler SwingStoreExportsHandler) InitiateExport(blockHeight uint64 Type: swingStoreExportActionType, BlockHeight: blockHeight, Request: initiateRequest, + Args: [1]SwingStoreExportOptions{exportOptions}, } // blockingSend for SWING_STORE_EXPORT action is safe to call from a goroutine @@ -645,7 +687,7 @@ func (exportsHandler SwingStoreExportsHandler) retrieveExport(onExportRetrieved // RestoreExport restores the JS swing-store using previously exported data and artifacts. 
// // Must be called by the main goroutine -func (exportsHandler SwingStoreExportsHandler) RestoreExport(provider SwingStoreExportProvider) error { +func (exportsHandler SwingStoreExportsHandler) RestoreExport(provider SwingStoreExportProvider, restoreOptions SwingStoreRestoreOptions) error { err := checkNotActive() if err != nil { return err @@ -759,7 +801,10 @@ func (exportsHandler SwingStoreExportsHandler) RestoreExport(provider SwingStore Type: swingStoreExportActionType, BlockHeight: blockHeight, Request: restoreRequest, - Args: [1]string{exportDir}, + Args: [1]swingStoreImportOptions{{ + ExportDir: exportDir, + IncludeHistorical: restoreOptions.IncludeHistorical, + }}, } _, err = exportsHandler.blockingSend(action, true) diff --git a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler_test.go b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler_test.go index 32f12a6f6d7..7396c501157 100644 --- a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler_test.go +++ b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler_test.go @@ -58,7 +58,7 @@ func TestSwingStoreSnapshotterInProgress(t *testing.T) { <-ch return nil } - err := exportsHandler.InitiateExport(123, exportEventHandler) + err := exportsHandler.InitiateExport(123, exportEventHandler, SwingStoreExportOptions{}) if err != nil { t.Fatal(err) } @@ -67,12 +67,12 @@ func TestSwingStoreSnapshotterInProgress(t *testing.T) { t.Fatal(err) } - err = exportsHandler.InitiateExport(456, newTestSwingStoreEventHandler()) + err = exportsHandler.InitiateExport(456, newTestSwingStoreEventHandler(), SwingStoreExportOptions{}) if err == nil { t.Error("wanted error for export operation in progress") } - err = exportsHandler.RestoreExport(SwingStoreExportProvider{BlockHeight: 456}) + err = exportsHandler.RestoreExport(SwingStoreExportProvider{BlockHeight: 456}, SwingStoreRestoreOptions{}) if err == nil { t.Error("wanted error for export operation in progress") } @@ -82,7 +82,7 @@ func 
TestSwingStoreSnapshotterInProgress(t *testing.T) { if err != nil { t.Fatal(err) } - err = exportsHandler.InitiateExport(456, exportEventHandler) + err = exportsHandler.InitiateExport(456, exportEventHandler, SwingStoreExportOptions{}) if err != nil { t.Fatal(err) } @@ -108,7 +108,7 @@ func TestSwingStoreSnapshotterSecondCommit(t *testing.T) { if err != nil { t.Fatal(err) } - err = exportsHandler.InitiateExport(123, exportEventHandler) + err = exportsHandler.InitiateExport(123, exportEventHandler, SwingStoreExportOptions{}) if err != nil { t.Fatal(err) } @@ -138,7 +138,7 @@ func TestSwingStoreSnapshotterInitiateFails(t *testing.T) { return "", nil } - err := exportsHandler.InitiateExport(123, exportEventHandler) + err := exportsHandler.InitiateExport(123, exportEventHandler, SwingStoreExportOptions{}) if err != nil { t.Fatal(err) } @@ -180,7 +180,7 @@ func TestSwingStoreSnapshotterRetrievalFails(t *testing.T) { return savedErr } - err := exportsHandler.InitiateExport(123, exportEventHandler) + err := exportsHandler.InitiateExport(123, exportEventHandler, SwingStoreExportOptions{}) if err != nil { t.Fatal(err) } @@ -216,7 +216,7 @@ func TestSwingStoreSnapshotterDiscard(t *testing.T) { activeOperation.exportRetrieved = true return nil } - err := exportsHandler.InitiateExport(123, exportEventHandler) + err := exportsHandler.InitiateExport(123, exportEventHandler, SwingStoreExportOptions{}) if err != nil { t.Fatal(err) } @@ -233,7 +233,7 @@ func TestSwingStoreSnapshotterDiscard(t *testing.T) { exportEventHandler.onExportStarted = func(height uint64, retrieveExport func() error) error { return nil } - err = exportsHandler.InitiateExport(456, exportEventHandler) + err = exportsHandler.InitiateExport(456, exportEventHandler, SwingStoreExportOptions{}) if err != nil { t.Fatal(err) } diff --git a/packages/cosmic-swingset/src/chain-main.js b/packages/cosmic-swingset/src/chain-main.js index 6418b07de96..6ab413d33c1 100644 --- a/packages/cosmic-swingset/src/chain-main.js +++ 
b/packages/cosmic-swingset/src/chain-main.js @@ -39,8 +39,14 @@ import stringify from './helpers/json-stable-stringify.js'; import { launch } from './launch-chain.js'; import { getTelemetryProviders } from './kernel-stats.js'; import { makeProcessValue } from './helpers/process-value.js'; -import { spawnSwingStoreExport } from './export-kernel-db.js'; -import { performStateSyncImport } from './import-kernel-db.js'; +import { + spawnSwingStoreExport, + validateExporterOptions, +} from './export-kernel-db.js'; +import { + performStateSyncImport, + validateImporterOptions, +} from './import-kernel-db.js'; // eslint-disable-next-line no-unused-vars let whenHellFreezesOver = null; @@ -497,26 +503,44 @@ export default async function main(progname, args, { env, homedir, agcc }) { async function handleSwingStoreExport(blockHeight, request, requestArgs) { switch (request) { case 'restore': { - const exportDir = requestArgs[0]; - if (typeof exportDir !== 'string') { - throw Fail`Invalid exportDir argument ${q(exportDir)}`; - } + const requestOptions = + typeof requestArgs[0] === 'string' + ? 
{ exportDir: requestArgs[0] } + : requestArgs[0] || {}; + const options = { + ...requestOptions, + stateDir: stateDBDir, + blockHeight, + }; + validateImporterOptions(options); !stateSyncExport || Fail`Snapshot already in progress for ${stateSyncExport.blockHeight}`; !blockingSend || Fail`Cannot restore snapshot after init`; console.info( 'Restoring SwingSet state from snapshot at block height', blockHeight, + 'with options', + JSON.stringify(requestOptions), ); - return performStateSyncImport( - { exportDir, stateDir: stateDBDir, blockHeight }, - { fs: { ...fs, ...fsPromises }, pathResolve, log: null }, - ); + return performStateSyncImport(options, { + fs: { ...fs, ...fsPromises }, + pathResolve, + log: null, + }); } case 'initiate': { !stateSyncExport || Fail`Snapshot already in progress for ${stateSyncExport.blockHeight}`; + const requestOptions = requestArgs[0] || {}; + + validateExporterOptions({ + ...requestOptions, + stateDir: stateDBDir, + exportDir: '', + blockHeight, + }); + const exportData = /** @type {Required>} */ ({ blockHeight, @@ -560,9 +584,12 @@ export default async function main(progname, args, { env, homedir, agcc }) { console.info( 'Initiating SwingSet state snapshot at block height', blockHeight, + 'with options', + JSON.stringify(requestOptions), ); exportData.exporter = spawnSwingStoreExport( { + ...requestOptions, stateDir: stateDBDir, exportDir: exportData.exportDir, blockHeight, diff --git a/packages/cosmic-swingset/src/export-kernel-db.js b/packages/cosmic-swingset/src/export-kernel-db.js index 970be626a0d..7705c488c03 100755 --- a/packages/cosmic-swingset/src/export-kernel-db.js +++ b/packages/cosmic-swingset/src/export-kernel-db.js @@ -77,6 +77,25 @@ const checkExportMode = mode => { * @property {boolean} [includeExportData] whether to include an artifact for the export data in the export */ +/** + * @param {object} options + * @returns {asserts options is StateSyncExporterOptions} + */ +export const validateExporterOptions = options 
=> { + typeof options === 'object' || Fail`options is not an object`; + typeof options.stateDir === 'string' || + Fail`required stateDir option not a string`; + typeof options.exportDir === 'string' || + Fail`required exportDir option not a string`; + options.blockHeight == null || + typeof options.blockHeight === 'number' || + Fail`optional blockHeight option not a number`; + checkExportMode(options.exportMode); + options.includeExportData == null || + typeof options.includeExportData === 'boolean' || + Fail`optional includeExportData option not a boolean`; +}; + /** * @param {StateSyncExporterOptions} options * @param {object} powers diff --git a/packages/cosmic-swingset/src/import-kernel-db.js b/packages/cosmic-swingset/src/import-kernel-db.js index 113b5415f90..ff1d3a5f9a8 100755 --- a/packages/cosmic-swingset/src/import-kernel-db.js +++ b/packages/cosmic-swingset/src/import-kernel-db.js @@ -27,6 +27,24 @@ import { ExportManifestFileName } from './export-kernel-db.js'; * @property {boolean} [includeHistorical] whether to include historical artifacts in the export */ +/** + * @param {object} options + * @returns {asserts options is StateSyncImporterOptions} + */ +export const validateImporterOptions = options => { + typeof options === 'object' || Fail`options is not an object`; + typeof options.stateDir === 'string' || + Fail`required stateDir option not a string`; + typeof options.exportDir === 'string' || + Fail`required exportDir option not a string`; + options.blockHeight == null || + typeof options.blockHeight === 'number' || + Fail`optional blockHeight option not a number`; + options.includeHistorical == null || + typeof options.includeHistorical === 'boolean' || + Fail`optional includeHistorical option not a boolean`; +}; + /** * @param {StateSyncImporterOptions} options * @param {object} powers From d1cdf56da26890521b9216629f65f6ebe526d747 Mon Sep 17 00:00:00 2001 From: Brian Warner Date: Wed, 19 Jul 2023 19:03:25 -0700 Subject: [PATCH 074/109] 
refactor(swing-store): refactor swingstore into better pieces This creates new files to house `importSwingStore`, `makeSwingStoreExporter`, and the kvStore. All were previously in `swingStore.js`, which is now a shell that gathers together the other components. The module exports have been consolidated into `src/index.js`, and types are now exported by a `types.d.ts`. No behavioral changes. --- packages/swing-store/package.json | 5 +- packages/swing-store/src/bundleStore.js | 2 +- packages/swing-store/src/exporter.js | 173 +++++ packages/swing-store/src/importer.js | 204 ++++++ packages/swing-store/src/index.js | 11 + packages/swing-store/src/internal.js | 14 + packages/swing-store/src/kvStore.js | 172 +++++ packages/swing-store/src/snapStore.js | 2 +- packages/swing-store/src/snapStoreIO.js | 8 + packages/swing-store/src/swingStore.js | 633 ++---------------- packages/swing-store/src/transcriptStore.js | 2 +- packages/swing-store/src/types.d.ts | 14 + packages/swing-store/src/types.js | 6 + packages/swing-store/src/util.js | 8 +- packages/swing-store/test/test-bundles.js | 2 +- .../swing-store/test/test-exportImport.js | 8 +- 16 files changed, 665 insertions(+), 599 deletions(-) create mode 100644 packages/swing-store/src/exporter.js create mode 100644 packages/swing-store/src/importer.js create mode 100644 packages/swing-store/src/index.js create mode 100644 packages/swing-store/src/internal.js create mode 100644 packages/swing-store/src/kvStore.js create mode 100644 packages/swing-store/src/snapStoreIO.js create mode 100644 packages/swing-store/src/types.d.ts create mode 100644 packages/swing-store/src/types.js diff --git a/packages/swing-store/package.json b/packages/swing-store/package.json index 7626c5bec46..b597852effd 100644 --- a/packages/swing-store/package.json +++ b/packages/swing-store/package.json @@ -3,7 +3,10 @@ "version": "0.9.1", "description": "Persistent storage for SwingSet", "type": "module", - "main": "src/swingStore.js", + "main": 
"./src/index.js", + "exports": { + ".": "./src/index.js" + }, "repository": "https://github.com/Agoric/agoric-sdk", "author": "Agoric", "license": "Apache-2.0", diff --git a/packages/swing-store/src/bundleStore.js b/packages/swing-store/src/bundleStore.js index 2ccfb69c7b0..54076710821 100644 --- a/packages/swing-store/src/bundleStore.js +++ b/packages/swing-store/src/bundleStore.js @@ -16,7 +16,7 @@ import { buffer } from './util.js'; * @typedef { EndoZipBase64Bundle | GetExportBundle | NestedEvaluateBundle } Bundle */ /** - * @typedef { import('./swingStore').SwingStoreExporter } SwingStoreExporter + * @typedef { import('./exporter').SwingStoreExporter } SwingStoreExporter * * @typedef {{ * addBundle: (bundleID: string, bundle: Bundle) => void; diff --git a/packages/swing-store/src/exporter.js b/packages/swing-store/src/exporter.js new file mode 100644 index 00000000000..80f1c030f86 --- /dev/null +++ b/packages/swing-store/src/exporter.js @@ -0,0 +1,173 @@ +import sqlite3 from 'better-sqlite3'; + +import { Fail, q } from '@agoric/assert'; + +import { dbFileInDirectory } from './util.js'; +import { getKeyType } from './kvStore.js'; +import { makeBundleStore } from './bundleStore.js'; +import { makeSnapStore } from './snapStore.js'; +import { makeSnapStoreIO } from './snapStoreIO.js'; +import { makeTranscriptStore } from './transcriptStore.js'; + +/** + * @template T + * @typedef { Iterable | AsyncIterable } AnyIterable + */ +/** + * @template T + * @typedef { IterableIterator | AsyncIterableIterator } AnyIterableIterator + */ + +/** + * + * @typedef {readonly [ + * key: string, + * value?: string | null | undefined, + * ]} KVPair + * + * @typedef {object} SwingStoreExporter + * + * Allows export of data from a swingStore as a fixed view onto the content as + * of the most recent commit point at the time the exporter was created. The + * exporter may be used while another SwingStore instance is active for the same + * DB, possibly in another thread or process. 
It guarantees that regardless of + * the concurrent activity of other swingStore instances, the data representing + * the commit point will stay consistent and available. + * + * @property {() => AnyIterableIterator} getExportData + * + * Get a full copy of the first-stage export data (key-value pairs) from the + * swingStore. This represents both the contents of the KVStore (excluding host + * and local prefixes), as well as any data needed to validate all artifacts, + * both current and historical. As such it represents the root of trust for the + * application. + * + * Content of validation data (with supporting entries for indexing): + * - kv.${key} = ${value} // ordinary kvStore data entry + * - snapshot.${vatID}.${snapPos} = ${{ vatID, snapPos, hash }} + * - snapshot.${vatID}.current = `snapshot.${vatID}.${snapPos}` + * - transcript.${vatID}.${startPos} = ${{ vatID, startPos, endPos, hash }} + * - transcript.${vatID}.current = ${{ vatID, startPos, endPos, hash }} + * + * @property {() => AnyIterableIterator} getArtifactNames + * + * Get a list of names of artifacts available from the swingStore. A name returned + * by this method guarantees that a call to `getArtifact` on the same exporter + * instance will succeed. Options control the filtering of the artifact names + * yielded. + * + * Artifact names: + * - transcript.${vatID}.${startPos}.${endPos} + * - snapshot.${vatID}.${snapPos} + * + * @property {(name: string) => AnyIterableIterator} getArtifact + * + * Retrieve an artifact by name. May throw if the artifact is not available, + * which can occur if the artifact is historical and wasn't preserved. + * + * @property {() => Promise} close + * + * Dispose of all resources held by this exporter. Any further operation on this + * exporter or its outstanding iterators will fail. 
+ */ + +/** + * @typedef {'current' | 'archival' | 'debug'} ExportMode + */ + +/** + * @param {string} dirPath + * @param { ExportMode } exportMode + * @returns {SwingStoreExporter} + */ +export function makeSwingStoreExporter(dirPath, exportMode = 'current') { + typeof dirPath === 'string' || Fail`dirPath must be a string`; + exportMode === 'current' || + exportMode === 'archival' || + exportMode === 'debug' || + Fail`invalid exportMode ${q(exportMode)}`; + const exportHistoricalSnapshots = exportMode === 'debug'; + const exportHistoricalTranscripts = exportMode !== 'current'; + const filePath = dbFileInDirectory(dirPath); + const db = sqlite3(filePath); + + // Execute the data export in a (read) transaction, to ensure that we are + // capturing the state of the database at a single point in time. Our close() + // will ROLLBACK the txn just in case some bug tried to change the DB. + const sqlBeginTransaction = db.prepare('BEGIN TRANSACTION'); + sqlBeginTransaction.run(); + + // ensureTxn can be a dummy, we just started one + const ensureTxn = () => {}; + const snapStore = makeSnapStore(db, ensureTxn, makeSnapStoreIO()); + const bundleStore = makeBundleStore(db, ensureTxn); + const transcriptStore = makeTranscriptStore(db, ensureTxn, () => {}); + + const sqlGetAllKVData = db.prepare(` + SELECT key, value + FROM kvStore + ORDER BY key + `); + + /** + * @returns {AsyncIterableIterator} + * @yields {KVPair} + */ + async function* getExportData() { + const kvPairs = sqlGetAllKVData.iterate(); + for (const kv of kvPairs) { + if (getKeyType(kv.key) === 'consensus') { + yield [`kv.${kv.key}`, kv.value]; + } + } + yield* snapStore.getExportRecords(true); + yield* transcriptStore.getExportRecords(true); + yield* bundleStore.getExportRecords(); + } + + /** + * @returns {AsyncIterableIterator} + * @yields {string} + */ + async function* getArtifactNames() { + yield* snapStore.getArtifactNames(exportHistoricalSnapshots); + yield* 
transcriptStore.getArtifactNames(exportHistoricalTranscripts); + yield* bundleStore.getArtifactNames(); + } + + /** + * @param {string} name + * @returns {AsyncIterableIterator} + */ + function getArtifact(name) { + typeof name === 'string' || Fail`artifact name must be a string`; + const [type] = name.split('.', 1); + + if (type === 'snapshot') { + return snapStore.exportSnapshot(name, exportHistoricalSnapshots); + } else if (type === 'transcript') { + return transcriptStore.exportSpan(name, exportHistoricalTranscripts); + } else if (type === 'bundle') { + return bundleStore.exportBundle(name); + } else { + throw Fail`invalid artifact type ${q(type)}`; + } + } + + const sqlAbort = db.prepare('ROLLBACK'); + + async function close() { + // After all the data has been extracted, always abort the export + // transaction to ensure that the export was read-only (i.e., that no bugs + // inadvertently modified the database). + sqlAbort.run(); + db.close(); + } + + return harden({ + getExportData, + getArtifactNames, + getArtifact, + close, + }); +} diff --git a/packages/swing-store/src/importer.js b/packages/swing-store/src/importer.js new file mode 100644 index 00000000000..e2eae80ab60 --- /dev/null +++ b/packages/swing-store/src/importer.js @@ -0,0 +1,204 @@ +import { Fail, q } from '@agoric/assert'; + +import { makeSwingStore } from './swingStore.js'; + +/** + * Function used to create a new swingStore from an object implementing the + * exporter API. The exporter API may be provided by a swingStore instance, or + * implemented by a host to restore data that was previously exported. 
+ * + * @typedef { import('./exporter').SwingStoreExporter } SwingStoreExporter + * @typedef { import('./swingStore').SwingStore } SwingStore + * @typedef {(exporter: SwingStoreExporter) => Promise} ImportSwingStore + */ + +function parseVatArtifactExportKey(key) { + const parts = key.split('.'); + const [_type, vatID, rawPos] = parts; + // prettier-ignore + parts.length === 3 || + Fail`expected artifact name of the form '{type}.{vatID}.{pos}', saw ${q(key)}`; + const isCurrent = rawPos === 'current'; + let pos; + if (isCurrent) { + pos = -1; + } else { + pos = Number(rawPos); + } + + return { vatID, isCurrent, pos }; +} + +function artifactKey(type, vatID, pos) { + return `${type}.${vatID}.${pos}`; +} + +/** + * @param {SwingStoreExporter} exporter + * @param {string | null} [dirPath] + * @param {object} options + * @returns {Promise} + */ +export async function importSwingStore(exporter, dirPath = null, options = {}) { + if (dirPath) { + typeof dirPath === 'string' || Fail`dirPath must be a string`; + } + const { includeHistorical = false } = options; + const store = makeSwingStore(dirPath, true, options); + const { kernelStorage, internal } = store; + + // Artifact metadata, keyed as `${type}.${vatID}.${pos}` + // + // Note that this key is almost but not quite the artifact name, since the + // names of transcript span artifacts also include the endPos, but the endPos + // value is in flux until the span is complete. + const artifactMetadata = new Map(); + + // Each vat requires a transcript span and (usually) a snapshot. This table + // tracks which of these we've seen, keyed by vatID. 
+ // vatID -> { snapshotKey: metadataKey, transcriptKey: metadataKey } + const vatArtifacts = new Map(); + const bundleArtifacts = new Map(); + + for await (const [key, value] of exporter.getExportData()) { + const [tag] = key.split('.', 1); + const subKey = key.substring(tag.length + 1); + if (tag === 'kv') { + // 'kv' keys contain individual kvStore entries + if (value == null) { + // Note '==' rather than '===': any nullish value implies deletion + kernelStorage.kvStore.delete(subKey); + } else { + kernelStorage.kvStore.set(subKey, value); + } + } else if (tag === 'bundle') { + // 'bundle' keys contain bundle IDs + if (value == null) { + bundleArtifacts.delete(key); + } else { + bundleArtifacts.set(key, value); + } + } else if (tag === 'transcript' || tag === 'snapshot') { + // 'transcript' and 'snapshot' keys contain artifact description info. + assert(value); // make TypeScript shut up + const { vatID, isCurrent, pos } = parseVatArtifactExportKey(key); + if (isCurrent) { + const vatInfo = vatArtifacts.get(vatID) || {}; + if (tag === 'snapshot') { + // `export.snapshot.{vatID}.current` directly identifies the current snapshot artifact + vatInfo.snapshotKey = value; + } else if (tag === 'transcript') { + // `export.transcript.${vatID}.current` contains a metadata record for the current + // state of the current transcript span as of the time of export + const metadata = JSON.parse(value); + vatInfo.transcriptKey = artifactKey(tag, vatID, metadata.startPos); + artifactMetadata.set(vatInfo.transcriptKey, metadata); + } + vatArtifacts.set(vatID, vatInfo); + } else { + artifactMetadata.set(artifactKey(tag, vatID, pos), JSON.parse(value)); + } + } else { + Fail`unknown artifact type tag ${q(tag)} on import`; + } + } + + // At this point we should have acquired the entire KV store state, plus + // sufficient metadata to identify the complete set of artifacts we'll need to + // fetch along with the information required to validate each of them after + // fetching. 
+ // + // Depending on how the export was parameterized, the metadata may also include + // information about historical artifacts that we might or might not actually + // fetch depending on how this import was parameterized + + // Fetch the set of current artifacts. + + // Keep track of fetched artifacts in this set so we don't fetch them a second + // time if we are trying for historical artifacts also. + const fetchedArtifacts = new Set(); + + for await (const [vatID, vatInfo] of vatArtifacts.entries()) { + // For each vat, we *must* have a transcript span. If this is not the very + // first transcript span in the history of that vat, then we also must have + // a snapshot for the state of the vat immediately prior to when the + // transcript span begins. + vatInfo.transcriptKey || + Fail`missing current transcript key for vat ${q(vatID)}`; + const transcriptInfo = artifactMetadata.get(vatInfo.transcriptKey); + transcriptInfo || Fail`missing transcript metadata for vat ${q(vatID)}`; + let snapshotInfo; + if (vatInfo.snapshotKey) { + snapshotInfo = artifactMetadata.get(vatInfo.snapshotKey); + snapshotInfo || Fail`missing snapshot metadata for vat ${q(vatID)}`; + } + if (!snapshotInfo) { + transcriptInfo.startPos === 0 || + Fail`missing current snapshot for vat ${q(vatID)}`; + } else { + snapshotInfo.snapPos + 1 === transcriptInfo.startPos || + Fail`current transcript for vat ${q(vatID)} doesn't go with snapshot`; + fetchedArtifacts.add(vatInfo.snapshotKey); + } + await (!snapshotInfo || + internal.snapStore.importSnapshot( + vatInfo.snapshotKey, + exporter, + snapshotInfo, + )); + + const transcriptArtifactName = `${vatInfo.transcriptKey}.${transcriptInfo.endPos}`; + await internal.transcriptStore.importSpan( + transcriptArtifactName, + exporter, + transcriptInfo, + ); + fetchedArtifacts.add(transcriptArtifactName); + } + const bundleArtifactNames = Array.from(bundleArtifacts.keys()).sort(); + for await (const bundleArtifactName of bundleArtifactNames) { + await 
internal.bundleStore.importBundle( + bundleArtifactName, + exporter, + bundleArtifacts.get(bundleArtifactName), + ); + } + + if (!includeHistorical) { + await exporter.close(); + return store; + } + + // If we're also importing historical artifacts, have the exporter enumerate + // the complete set of artifacts it has and fetch all of them except for the + // ones we've already fetched. + for await (const artifactName of exporter.getArtifactNames()) { + if (fetchedArtifacts.has(artifactName)) { + continue; + } + let fetchedP; + if (artifactName.startsWith('snapshot.')) { + fetchedP = internal.snapStore.importSnapshot( + artifactName, + exporter, + artifactMetadata.get(artifactName), + ); + } else if (artifactName.startsWith('transcript.')) { + // strip endPos off artifact name + const metadataKey = artifactName.split('.').slice(0, 3).join('.'); + fetchedP = internal.transcriptStore.importSpan( + artifactName, + exporter, + artifactMetadata.get(metadataKey), + ); + } else if (artifactName.startsWith('bundle.')) { + // already taken care of + continue; + } else { + Fail`unknown artifact type: ${artifactName}`; + } + await fetchedP; + } + await exporter.close(); + return store; +} diff --git a/packages/swing-store/src/index.js b/packages/swing-store/src/index.js new file mode 100644 index 00000000000..f2144bc43ee --- /dev/null +++ b/packages/swing-store/src/index.js @@ -0,0 +1,11 @@ +export { initSwingStore, openSwingStore, isSwingStore } from './swingStore.js'; +export { makeSwingStoreExporter } from './exporter.js'; +export { importSwingStore } from './importer.js'; + +// temporary, for the benefit of SwingSet/misc-tools/replay-transcript.js +export { makeSnapStore } from './snapStore.js'; +// and less temporary, for SwingSet/test/vat-warehouse/test-reload-snapshot.js +export { makeSnapStoreIO } from './snapStoreIO.js'; + +// eslint-disable-next-line import/export +export * from './types.js'; diff --git a/packages/swing-store/src/internal.js 
b/packages/swing-store/src/internal.js new file mode 100644 index 00000000000..e73a3d11732 --- /dev/null +++ b/packages/swing-store/src/internal.js @@ -0,0 +1,14 @@ +/** + * @typedef { import('./snapStore').SnapStoreInternal } SnapStoreInternal + * @typedef { import('./transcriptStore').TranscriptStoreInternal } TranscriptStoreInternal + * @typedef { import('./bundleStore').BundleStoreInternal } BundleStoreInternal + * + * @typedef {{ + * transcriptStore: TranscriptStoreInternal, + * snapStore: SnapStoreInternal, + * bundleStore: BundleStoreInternal, + * }} SwingStoreInternal + */ + +// Ensure this is a module. +export {}; diff --git a/packages/swing-store/src/kvStore.js b/packages/swing-store/src/kvStore.js new file mode 100644 index 00000000000..bf3e80e740c --- /dev/null +++ b/packages/swing-store/src/kvStore.js @@ -0,0 +1,172 @@ +// @ts-check +import { Fail } from '@agoric/assert'; + +/** + * @typedef {{ + * has: (key: string) => boolean, + * get: (key: string) => string | undefined, + * getNextKey: (previousKey: string) => string | undefined, + * set: (key: string, value: string, bypassHash?: boolean ) => void, + * delete: (key: string) => void, + * }} KVStore + */ + +/** + * @param {string} key + */ +export function getKeyType(key) { + if (key.startsWith('local.')) { + return 'local'; + } else if (key.startsWith('host.')) { + return 'host'; + } + return 'consensus'; +} + +/** + * @param {object} db The SQLite database connection. + * @param {() => void} ensureTxn Called before mutating methods to establish a DB transaction + * @param {(...args: string[]) => void} trace Called after sets/gets to record a debug log + * @returns { KVStore } + */ + +export function makeKVStore(db, ensureTxn, trace) { + db.exec(` + CREATE TABLE IF NOT EXISTS kvStore ( + key TEXT, + value TEXT, + PRIMARY KEY (key) + ) + `); + + const sqlKVGet = db.prepare(` + SELECT value + FROM kvStore + WHERE key = ? 
+ `); + sqlKVGet.pluck(true); + + /** + * Obtain the value stored for a given key. + * + * @param {string} key The key whose value is sought. + * + * @returns {string | undefined} the (string) value for the given key, or + * undefined if there is no such value. + * + * @throws if key is not a string. + */ + function get(key) { + typeof key === 'string' || Fail`key must be a string`; + return sqlKVGet.get(key); + } + + const sqlKVGetNextKey = db.prepare(` + SELECT key + FROM kvStore + WHERE key > ? + LIMIT 1 + `); + sqlKVGetNextKey.pluck(true); + + /** + * getNextKey enables callers to iterate over all keys within a + * given range. To build an iterator of all keys from start + * (inclusive) to end (exclusive), do: + * + * function* iterate(start, end) { + * if (kvStore.has(start)) { + * yield start; + * } + * let prev = start; + * while (true) { + * let next = kvStore.getNextKey(prev); + * if (!next || next >= end) { + * break; + * } + * yield next; + * prev = next; + * } + * } + * + * @param {string} previousKey The key returned will always be later than this one. + * + * @returns {string | undefined} a key string, or undefined if we reach the end of the store + * + * @throws if previousKey is not a string + */ + + function getNextKey(previousKey) { + typeof previousKey === 'string' || Fail`previousKey must be a string`; + return sqlKVGetNextKey.get(previousKey); + } + + /** + * Test if the state contains a value for a given key. + * + * @param {string} key The key that is of interest. + * + * @returns {boolean} true if a value is stored for the key, false if not. + * + * @throws if key is not a string. + */ + function has(key) { + typeof key === 'string' || Fail`key must be a string`; + return get(key) !== undefined; + } + + const sqlKVSet = db.prepare(` + INSERT INTO kvStore (key, value) + VALUES (?, ?) + ON CONFLICT DO UPDATE SET value = excluded.value + `); + + /** + * Store a value for a given key. The value will replace any prior value if + * there was one. 
+ * + * @param {string} key The key whose value is being set. + * @param {string} value The value to set the key to. + * + * @throws if either parameter is not a string. + */ + function set(key, value) { + typeof key === 'string' || Fail`key must be a string`; + typeof value === 'string' || Fail`value must be a string`; + // synchronous read after write within a transaction is safe + // The transaction's overall success will be awaited during commit + ensureTxn(); + sqlKVSet.run(key, value); + trace('set', key, value); + } + + const sqlKVDel = db.prepare(` + DELETE FROM kvStore + WHERE key = ? + `); + + /** + * Remove any stored value for a given key. It is permissible for there to + * be no existing stored value for the key. + * + * @param {string} key The key whose value is to be deleted + * + * @throws if key is not a string. + */ + function del(key) { + typeof key === 'string' || Fail`key must be a string`; + ensureTxn(); + sqlKVDel.run(key); + trace('del', key); + } + + const kvStore = { + has, + get, + getNextKey, + set, + delete: del, + }; + + return kvStore; +} diff --git a/packages/swing-store/src/snapStore.js b/packages/swing-store/src/snapStore.js index b3862fa0784..faed03ca7e0 100644 --- a/packages/swing-store/src/snapStore.js +++ b/packages/swing-store/src/snapStore.js @@ -25,7 +25,7 @@ import { buffer } from './util.js'; */ /** - * @typedef { import('./swingStore').SwingStoreExporter } SwingStoreExporter + * @typedef { import('./exporter').SwingStoreExporter } SwingStoreExporter * * @typedef {{ * loadSnapshot: (vatID: string) => AsyncIterableIterator, diff --git a/packages/swing-store/src/snapStoreIO.js b/packages/swing-store/src/snapStoreIO.js new file mode 100644 index 00000000000..ddbeb28c64c --- /dev/null +++ b/packages/swing-store/src/snapStoreIO.js @@ -0,0 +1,8 @@ +import { performance } from 'perf_hooks'; +import { makeMeasureSeconds } from '@agoric/internal'; + +export function makeSnapStoreIO() { + return { + measureSeconds: 
makeMeasureSeconds(performance.now), + }; +} diff --git a/packages/swing-store/src/swingStore.js b/packages/swing-store/src/swingStore.js index 4cf7027d211..4d7a254ab6f 100644 --- a/packages/swing-store/src/swingStore.js +++ b/packages/swing-store/src/swingStore.js @@ -2,74 +2,33 @@ /* global Buffer */ import fs from 'fs'; import path from 'path'; -import { performance } from 'perf_hooks'; import sqlite3 from 'better-sqlite3'; -import { assert, Fail, q } from '@agoric/assert'; -import { makeMeasureSeconds } from '@agoric/internal'; +import { Fail, q } from '@agoric/assert'; +import { dbFileInDirectory } from './util.js'; +import { makeKVStore, getKeyType } from './kvStore.js'; import { makeTranscriptStore } from './transcriptStore.js'; import { makeSnapStore } from './snapStore.js'; import { makeBundleStore } from './bundleStore.js'; import { createSHA256 } from './hasher.js'; - -export { makeSnapStore, makeBundleStore }; - -/** - * This is a polyfill for the `buffer` function from Node's - * 'stream/consumers' package, which unfortunately only exists in newer versions - * of Node. 
- * - * @param {AsyncIterable} inStream - */ -export const buffer = async inStream => { - const chunks = []; - for await (const chunk of inStream) { - chunks.push(chunk); - } - return Buffer.concat(chunks); -}; - -export function makeSnapStoreIO() { - return { - measureSeconds: makeMeasureSeconds(performance.now), - }; -} - -/** - * @param {string} key - */ -function getKeyType(key) { - if (key.startsWith('local.')) { - return 'local'; - } else if (key.startsWith('host.')) { - return 'host'; - } - return 'consensus'; -} +import { makeSnapStoreIO } from './snapStoreIO.js'; /** - * @typedef {{ - * has: (key: string) => boolean, - * get: (key: string) => string | undefined, - * getNextKey: (previousKey: string) => string | undefined, - * set: (key: string, value: string, bypassHash?: boolean ) => void, - * delete: (key: string) => void, - * }} KVStore + * @typedef { import('./kvStore').KVStore } KVStore * * @typedef { import('./snapStore').SnapStore } SnapStore - * @typedef { import('./snapStore').SnapStoreInternal } SnapStoreInternal * @typedef { import('./snapStore').SnapshotResult } SnapshotResult * * @typedef { import('./transcriptStore').TranscriptStore } TranscriptStore - * @typedef { import('./transcriptStore').TranscriptStoreInternal } TranscriptStoreInternal * @typedef { import('./transcriptStore').TranscriptStoreDebug } TranscriptStoreDebug * * @typedef { import('./bundleStore').BundleStore } BundleStore - * @typedef { import('./bundleStore').BundleStoreInternal } BundleStoreInternal * @typedef { import('./bundleStore').BundleStoreDebug } BundleStoreDebug * + * @typedef { import('./exporter').KVPair } KVPair + * * @typedef {{ * kvStore: KVStore, // a key-value API object to load and store data on behalf of the kernel * transcriptStore: TranscriptStore, // a stream-oriented API object to append and read transcript entries @@ -105,184 +64,13 @@ function getKeyType(key) { * }} SwingStoreDebugTools * * @typedef {{ - * transcriptStore: TranscriptStoreInternal, - 
* snapStore: SnapStoreInternal, - * bundleStore: BundleStoreInternal, - * }} SwingStoreInternal - * - * @typedef {{ * kernelStorage: SwingStoreKernelStorage, * hostStorage: SwingStoreHostStorage, * debug: SwingStoreDebugTools, - * internal: SwingStoreInternal, + * internal: import('./internal.js').SwingStoreInternal, * }} SwingStore */ -/** - * @template T - * @typedef { Iterable | AsyncIterable } AnyIterable - */ -/** - * @template T - * @typedef { IterableIterator | AsyncIterableIterator } AnyIterableIterator - */ - -/** - * @typedef {readonly [ - * key: string, - * value?: string | null | undefined, - * ]} KVPair - * - * @typedef {object} SwingStoreExporter - * - * Allows export of data from a swingStore as a fixed view onto the content as - * of the most recent commit point at the time the exporter was created. The - * exporter may be used while another SwingStore instance is active for the same - * DB, possibly in another thread or process. It guarantees that regardless of - * the concurrent activity of other swingStore instances, the data representing - * the commit point will stay consistent and available. - * - * @property {() => AnyIterableIterator} getExportData - * - * Get a full copy of the first-stage export data (key-value pairs) from the - * swingStore. This represents both the contents of the KVStore (excluding host - * and local prefixes), as well as any data needed to validate all artifacts, - * both current and historical. As such it represents the root of trust for the - * application. 
- * - * Content of validation data (with supporting entries for indexing): - * - kv.${key} = ${value} // ordinary kvStore data entry - * - snapshot.${vatID}.${snapPos} = ${{ vatID, snapPos, hash }); - * - snapshot.${vatID}.current = `snapshot.${vatID}.${snapPos}` - * - transcript.${vatID}.${startPos} = ${{ vatID, startPos, endPos, hash }} - * - transcript.${vatID}.current = ${{ vatID, startPos, endPos, hash }} - * - * @property {() => AnyIterableIterator} getArtifactNames - * - * Get a list of name of artifacts available from the swingStore. A name returned - * by this method guarantees that a call to `getArtifact` on the same exporter - * instance will succeed. Options control the filtering of the artifact names - * yielded. - * - * Artifact names: - * - transcript.${vatID}.${startPos}.${endPos} - * - snapshot.${vatID}.${snapPos} - * - * @property {(name: string) => AnyIterableIterator} getArtifact - * - * Retrieve an artifact by name. May throw if the artifact is not available, - * which can occur if the artifact is historical and wasn't been preserved. - * - * @property {() => Promise} close - * - * Dispose of all resources held by this exporter. Any further operation on this - * exporter or its outstanding iterators will fail. - */ - -/** - * @param {string} dirPath - * @param {string} exportMode - * @returns {SwingStoreExporter} - */ -export function makeSwingStoreExporter(dirPath, exportMode = 'current') { - typeof dirPath === 'string' || Fail`dirPath must be a string`; - exportMode === 'current' || - exportMode === 'archival' || - exportMode === 'debug' || - Fail`invalid exportMode ${q(exportMode)}`; - const exportHistoricalSnapshots = exportMode === 'debug'; - const exportHistoricalTranscripts = exportMode !== 'current'; - const filePath = path.join(dirPath, 'swingstore.sqlite'); - const db = sqlite3(filePath); - - // Execute the data export in a (read) transaction, to ensure that we are - // capturing the state of the database at a single point in time. 
- const sqlBeginTransaction = db.prepare('BEGIN TRANSACTION'); - sqlBeginTransaction.run(); - - // ensureTxn can be a dummy, we just started one - const ensureTxn = () => {}; - const snapStore = makeSnapStore(db, ensureTxn, makeSnapStoreIO()); - const bundleStore = makeBundleStore(db, ensureTxn); - const transcriptStore = makeTranscriptStore(db, ensureTxn, () => {}); - - const sqlGetAllKVData = db.prepare(` - SELECT key, value - FROM kvStore - ORDER BY key - `); - - /** - * @returns {AsyncIterableIterator} - * @yields {KVPair} - */ - async function* getExportData() { - const kvPairs = sqlGetAllKVData.iterate(); - for (const kv of kvPairs) { - if (getKeyType(kv.key) === 'consensus') { - yield [`kv.${kv.key}`, kv.value]; - } - } - yield* snapStore.getExportRecords(true); - yield* transcriptStore.getExportRecords(true); - yield* bundleStore.getExportRecords(); - } - - /** - * @returns {AsyncIterableIterator} - * @yields {string} - */ - async function* getArtifactNames() { - yield* snapStore.getArtifactNames(exportHistoricalSnapshots); - yield* transcriptStore.getArtifactNames(exportHistoricalTranscripts); - yield* bundleStore.getArtifactNames(); - } - - /** - * @param {string} name - * @returns {AsyncIterableIterator} - */ - function getArtifact(name) { - typeof name === 'string' || Fail`artifact name must be a string`; - const [type] = name.split('.', 1); - - if (type === 'snapshot') { - return snapStore.exportSnapshot(name, exportHistoricalSnapshots); - } else if (type === 'transcript') { - return transcriptStore.exportSpan(name, exportHistoricalTranscripts); - } else if (type === 'bundle') { - return bundleStore.exportBundle(name); - } else { - throw Fail`invalid artifact type ${q(type)}`; - } - } - - const sqlAbort = db.prepare('ROLLBACK'); - - async function close() { - // After all the data has been extracted, always abort the export - // transaction to ensure that the export was read-only (i.e., that no bugs - // inadvertantly modified the database). 
- sqlAbort.run(); - db.close(); - } - - return harden({ - getExportData, - getArtifactNames, - getArtifact, - close, - }); -} - -/** - * Function used to create a new swingStore from an object implementing the - * exporter API. The exporter API may be provided by a swingStore instance, or - * implemented by a host to restore data that was previously exported. - * - * @typedef {(exporter: SwingStoreExporter) => Promise} ImportSwingStore - */ - /** * A swing store holds the state of a swingset instance. This "store" is * actually several different stores of different types that travel as a flock @@ -342,7 +130,7 @@ export function makeSwingStoreExporter(dirPath, exportMode = 'current') { * * @returns {SwingStore} */ -function makeSwingStore(dirPath, forceReset, options = {}) { +export function makeSwingStore(dirPath, forceReset, options = {}) { const { serialized } = options; if (serialized) { Buffer.isBuffer(serialized) || Fail`options.serialized must be Buffer`; @@ -374,7 +162,7 @@ function makeSwingStore(dirPath, forceReset, options = {}) { } } fs.mkdirSync(dirPath, { recursive: true }); - filePath = path.join(dirPath, 'swingstore.sqlite'); + filePath = dbFileInDirectory(dirPath); } else { filePath = ':memory:'; } @@ -449,13 +237,6 @@ function makeSwingStore(dirPath, forceReset, options = {}) { // Perform all database initialization in a single transaction sqlBeginTransaction.run(); - db.exec(` - CREATE TABLE IF NOT EXISTS kvStore ( - key TEXT, - value TEXT, - PRIMARY KEY (key) - ) - `); db.exec(` CREATE TABLE IF NOT EXISTS pendingExports ( @@ -465,10 +246,32 @@ function makeSwingStore(dirPath, forceReset, options = {}) { ) `); + let exportCallback; + function setExportCallback(cb) { + typeof cb === 'function' || Fail`callback must be a function`; + exportCallback = cb; + } + if (options.exportCallback) { + setExportCallback(options.exportCallback); + } + + const sqlAddPendingExport = db.prepare(` + INSERT INTO pendingExports (key, value) + VALUES (?, ?) 
+ ON CONFLICT DO UPDATE SET value = excluded.value + `); + + function noteExport(key, value) { + if (exportCallback) { + sqlAddPendingExport.run(key, value); + } + } + + const kvStore = makeKVStore(db, ensureTxn, trace); + const { dumpTranscripts, ...transcriptStore } = makeTranscriptStore( db, ensureTxn, - // eslint-disable-next-line no-use-before-define noteExport, { keepTranscripts, @@ -478,7 +281,6 @@ function makeSwingStore(dirPath, forceReset, options = {}) { db, ensureTxn, makeSnapStoreIO(), - // eslint-disable-next-line no-use-before-define noteExport, { keepSnapshots, @@ -487,7 +289,6 @@ function makeSwingStore(dirPath, forceReset, options = {}) { const { dumpBundles, ...bundleStore } = makeBundleStore( db, ensureTxn, - // eslint-disable-next-line no-use-before-define noteExport, ); @@ -496,20 +297,11 @@ function makeSwingStore(dirPath, forceReset, options = {}) { // At this point, all database initialization should be complete, so commit now. sqlCommit.run(); - let exportCallback; - function setExportCallback(cb) { - typeof cb === 'function' || Fail`callback must be a function`; - exportCallback = cb; - } - if (options.exportCallback) { - setExportCallback(options.exportCallback); - } - let inCrank = false; function diskUsage() { if (dirPath) { - const dataFilePath = `${dirPath}/swingstore.sqlite`; + const dataFilePath = dbFileInDirectory(dirPath); const stat = fs.statSync(dataFilePath); return stat.size; } else { @@ -517,154 +309,13 @@ function makeSwingStore(dirPath, forceReset, options = {}) { } } - const sqlKVGet = db.prepare(` - SELECT value - FROM kvStore - WHERE key = ? - `); - sqlKVGet.pluck(true); - - /** - * Obtain the value stored for a given key. - * - * @param {string} key The key whose value is sought. - * - * @returns {string | undefined} the (string) value for the given key, or - * undefined if there is no such value. - * - * @throws if key is not a string. 
- */ - function get(key) { - typeof key === 'string' || Fail`key must be a string`; - return sqlKVGet.get(key); - } - - const sqlKVGetNextKey = db.prepare(` - SELECT key - FROM kvStore - WHERE key > ? - LIMIT 1 - `); - sqlKVGetNextKey.pluck(true); - - /** - * getNextKey enables callers to iterate over all keys within a - * given range. To build an iterator of all keys from start - * (inclusive) to end (exclusive), do: - * - * function* iterate(start, end) { - * if (kvStore.has(start)) { - * yield start; - * } - * let prev = start; - * while (true) { - * let next = kvStore.getNextKey(prev); - * if (!next || next >= end) { - * break; - * } - * yield next; - * prev = next; - * } - * } - * - * @param {string} previousKey The key returned will always be later than this one. - * - * @returns {string | undefined} a key string, or undefined if we reach the end of the store - * - * @throws if previousKey is not a string - */ - - function getNextKey(previousKey) { - typeof previousKey === 'string' || Fail`previousKey must be a string`; - return sqlKVGetNextKey.get(previousKey); - } - - /** - * Test if the state contains a value for a given key. - * - * @param {string} key The key that is of interest. - * - * @returns {boolean} true if a value is stored for the key, false if not. - * - * @throws if key is not a string. - */ - function has(key) { - typeof key === 'string' || Fail`key must be a string`; - return get(key) !== undefined; - } - - const sqlKVSet = db.prepare(` - INSERT INTO kvStore (key, value) - VALUES (?, ?) - ON CONFLICT DO UPDATE SET value = excluded.value - `); - - /** - * Store a value for a given key. The value will replace any prior value if - * there was one. - * - * @param {string} key The key whose value is being set. - * @param {string} value The value to set the key to. - * - * @throws if either parameter is not a string. 
- */ - function set(key, value) { - typeof key === 'string' || Fail`key must be a string`; - typeof value === 'string' || Fail`value must be a string`; - // synchronous read after write within a transaction is safe - // The transaction's overall success will be awaited during commit - ensureTxn(); - sqlKVSet.run(key, value); - trace('set', key, value); - } - - const sqlKVDel = db.prepare(` - DELETE FROM kvStore - WHERE key = ? - `); - - /** - * Remove any stored value for a given key. It is permissible for there to - * be no existing stored value for the key. - * - * @param {string} key The key whose value is to be deleted - * - * @throws if key is not a string. - */ - function del(key) { - typeof key === 'string' || Fail`key must be a string`; - ensureTxn(); - sqlKVDel.run(key); - trace('del', key); - } - - const kvStore = { - has, - get, - getNextKey, - set, - delete: del, - }; - - const sqlAddPendingExport = db.prepare(` - INSERT INTO pendingExports (key, value) - VALUES (?, ?) - ON CONFLICT DO UPDATE SET value = excluded.value - `); - - function noteExport(key, value) { - if (exportCallback) { - sqlAddPendingExport.run(key, value); - } - } - const kernelKVStore = { ...kvStore, set(key, value) { typeof key === 'string' || Fail`key must be a string`; const keyType = getKeyType(key); keyType !== 'host' || Fail`kernelKVStore refuses host keys`; - set(key, value); + kvStore.set(key, value); if (keyType === 'consensus') { noteExport(`kv.${key}`, value); crankhasher.add('add'); @@ -679,7 +330,7 @@ function makeSwingStore(dirPath, forceReset, options = {}) { typeof key === 'string' || Fail`key must be a string`; const keyType = getKeyType(key); keyType !== 'host' || Fail`kernelKVStore refuses host keys`; - del(key); + kvStore.delete(key); if (keyType === 'consensus') { noteExport(`kv.${key}`, undefined); crankhasher.add('delete'); @@ -695,12 +346,12 @@ function makeSwingStore(dirPath, forceReset, options = {}) { set(key, value) { const keyType = getKeyType(key); 
keyType === 'host' || Fail`hostKVStore requires host keys`; - set(key, value); + kvStore.set(key, value); }, delete(key) { const keyType = getKeyType(key); keyType === 'host' || Fail`hostKVStore requires host keys`; - del(key); + kvStore.delete(key); }, }; @@ -740,7 +391,7 @@ function makeSwingStore(dirPath, forceReset, options = {}) { resetCrankhash(); // Get the old activityhash - let oldActivityhash = get('activityhash'); + let oldActivityhash = kvStore.get('activityhash'); if (oldActivityhash === undefined) { oldActivityhash = ''; } @@ -756,7 +407,7 @@ function makeSwingStore(dirPath, forceReset, options = {}) { // Store the new activityhash const activityhash = hasher.finish(); - set('activityhash', activityhash); + kvStore.set('activityhash', activityhash); // Need to explicitly call noteExport here because activityhash is written // directly to the low-level store to avoid recursive hashing, which // bypasses the normal notification mechanism @@ -766,7 +417,7 @@ function makeSwingStore(dirPath, forceReset, options = {}) { } function getActivityhash() { - return get('activityhash') || ''; + return kvStore.get('activityhash') || ''; } const sqlExportsGet = db.prepare(` @@ -823,6 +474,9 @@ function makeSwingStore(dirPath, forceReset, options = {}) { stopTrace(); } + /** @type {import('./internal.js').SwingStoreInternal} */ + const internal = harden({ snapStore, transcriptStore, bundleStore }); + /** * Return a Buffer with the entire DB state, useful for cloning a * small swingstore in unit tests. 
@@ -904,11 +558,6 @@ function makeSwingStore(dirPath, forceReset, options = {}) { serialize, dump, }; - const internal = { - snapStore, - transcriptStore, - bundleStore, - }; return harden({ kernelStorage, @@ -941,198 +590,6 @@ export function initSwingStore(dirPath = null, options = {}) { return makeSwingStore(dirPath, true, options); } -function parseVatArtifactExportKey(key) { - const parts = key.split('.'); - const [_type, vatID, rawPos] = parts; - // prettier-ignore - parts.length === 3 || - Fail`expected artifact name of the form '{type}.{vatID}.{pos}', saw ${q(key)}`; - const isCurrent = rawPos === 'current'; - let pos; - if (isCurrent) { - pos = -1; - } else { - pos = Number(rawPos); - } - - return { vatID, isCurrent, pos }; -} - -function artifactKey(type, vatID, pos) { - return `${type}.${vatID}.${pos}`; -} - -/** - * @param {SwingStoreExporter} exporter - * @param {string | null} [dirPath] - * @param {object} options - * @returns {Promise} - */ -export async function importSwingStore(exporter, dirPath = null, options = {}) { - if (dirPath) { - typeof dirPath === 'string' || Fail`dirPath must be a string`; - } - const { includeHistorical = false } = options; - const store = makeSwingStore(dirPath, true, options); - const { kernelStorage, internal } = store; - - // Artifact metadata, keyed as `${type}.${vatID}.${pos}` - // - // Note that this key is almost but not quite the artifact name, since the - // names of transcript span artifacts also include the endPos, but the endPos - // value is in flux until the span is complete. - const artifactMetadata = new Map(); - - // Each vat requires a transcript span and (usually) a snapshot. This table - // tracks which of these we've seen, keyed by vatID. 
- // vatID -> { snapshotKey: metadataKey, transcriptKey: metatdataKey } - const vatArtifacts = new Map(); - const bundleArtifacts = new Map(); - - for await (const [key, value] of exporter.getExportData()) { - const [tag] = key.split('.', 1); - const subKey = key.substring(tag.length + 1); - if (tag === 'kv') { - // 'kv' keys contain individual kvStore entries - if (value == null) { - // Note '==' rather than '===': any nullish value implies deletion - kernelStorage.kvStore.delete(subKey); - } else { - kernelStorage.kvStore.set(subKey, value); - } - } else if (tag === 'bundle') { - // 'bundle' keys contain bundle IDs - if (value == null) { - bundleArtifacts.delete(key); - } else { - bundleArtifacts.set(key, value); - } - } else if (tag === 'transcript' || tag === 'snapshot') { - // 'transcript' and 'snapshot' keys contain artifact description info. - assert(value); // make TypeScript shut up - const { vatID, isCurrent, pos } = parseVatArtifactExportKey(key); - if (isCurrent) { - const vatInfo = vatArtifacts.get(vatID) || {}; - if (tag === 'snapshot') { - // `export.snapshot.{vatID}.current` directly identifies the current snapshot artifact - vatInfo.snapshotKey = value; - } else if (tag === 'transcript') { - // `export.transcript.${vatID}.current` contains a metadata record for the current - // state of the current transcript span as of the time of export - const metadata = JSON.parse(value); - vatInfo.transcriptKey = artifactKey(tag, vatID, metadata.startPos); - artifactMetadata.set(vatInfo.transcriptKey, metadata); - } - vatArtifacts.set(vatID, vatInfo); - } else { - artifactMetadata.set(artifactKey(tag, vatID, pos), JSON.parse(value)); - } - } else { - Fail`unknown artifact type tag ${q(tag)} on import`; - } - } - - // At this point we should have acquired the entire KV store state, plus - // sufficient metadata to identify the complete set of artifacts we'll need to - // fetch along with the information required to validate each of them after - // fetching. 
- // - // Depending on how the export was parameterized, the metadata may also include - // information about historical artifacts that we might or might not actually - // fetch depending on how this import was parameterized - - // Fetch the set of current artifacts. - - // Keep track of fetched artifacts in this set so we don't fetch them a second - // time if we are trying for historical artifacts also. - const fetchedArtifacts = new Set(); - - for await (const [vatID, vatInfo] of vatArtifacts.entries()) { - // For each vat, we *must* have a transcript span. If this is not the very - // first transcript span in the history of that vat, then we also must have - // a snapshot for the state of the vat immediately prior to when the - // transcript span begins. - vatInfo.transcriptKey || - Fail`missing current transcript key for vat ${q(vatID)}`; - const transcriptInfo = artifactMetadata.get(vatInfo.transcriptKey); - transcriptInfo || Fail`missing transcript metadata for vat ${q(vatID)}`; - let snapshotInfo; - if (vatInfo.snapshotKey) { - snapshotInfo = artifactMetadata.get(vatInfo.snapshotKey); - snapshotInfo || Fail`missing snapshot metadata for vat ${q(vatID)}`; - } - if (!snapshotInfo) { - transcriptInfo.startPos === 0 || - Fail`missing current snapshot for vat ${q(vatID)}`; - } else { - snapshotInfo.snapPos + 1 === transcriptInfo.startPos || - Fail`current transcript for vat ${q(vatID)} doesn't go with snapshot`; - fetchedArtifacts.add(vatInfo.snapshotKey); - } - await (!snapshotInfo || - internal.snapStore.importSnapshot( - vatInfo.snapshotKey, - exporter, - snapshotInfo, - )); - - const transcriptArtifactName = `${vatInfo.transcriptKey}.${transcriptInfo.endPos}`; - await internal.transcriptStore.importSpan( - transcriptArtifactName, - exporter, - transcriptInfo, - ); - fetchedArtifacts.add(transcriptArtifactName); - } - const bundleArtifactNames = Array.from(bundleArtifacts.keys()).sort(); - for await (const bundleArtifactName of bundleArtifactNames) { - await 
internal.bundleStore.importBundle( - bundleArtifactName, - exporter, - bundleArtifacts.get(bundleArtifactName), - ); - } - - if (!includeHistorical) { - // eslint-disable-next-line @jessie.js/no-nested-await - await exporter.close(); - return store; - } - - // If we're also importing historical artifacts, have the exporter enumerate - // the complete set of artifacts it has and fetch all of them except for the - // ones we've already fetched. - for await (const artifactName of exporter.getArtifactNames()) { - if (fetchedArtifacts.has(artifactName)) { - continue; - } - let fetchedP; - if (artifactName.startsWith('snapshot.')) { - fetchedP = internal.snapStore.importSnapshot( - artifactName, - exporter, - artifactMetadata.get(artifactName), - ); - } else if (artifactName.startsWith('transcript.')) { - // strip endPos off artifact name - const metadataKey = artifactName.split('.').slice(0, 3).join('.'); - fetchedP = internal.transcriptStore.importSpan( - artifactName, - exporter, - artifactMetadata.get(metadataKey), - ); - } else if (artifactName.startsWith('bundle.')) { - // already taken care of - continue; - } else { - Fail`unknown artifact type: ${artifactName}`; - } - await fetchedP; - } - await exporter.close(); - return store; -} - /** * Open a persistent swingset store. If there is no existing store at the given * `dirPath`, a new, empty store will be created. 
@@ -1163,7 +620,7 @@ export function openSwingStore(dirPath, options = {}) { export function isSwingStore(dirPath) { typeof dirPath === 'string' || Fail`dirPath must be a string`; if (fs.existsSync(dirPath)) { - const storeFile = path.resolve(dirPath, 'swingstore.sqlite'); + const storeFile = dbFileInDirectory(dirPath); if (fs.existsSync(storeFile)) { return true; } diff --git a/packages/swing-store/src/transcriptStore.js b/packages/swing-store/src/transcriptStore.js index a8f2b4f3ea7..3d300bec2d7 100644 --- a/packages/swing-store/src/transcriptStore.js +++ b/packages/swing-store/src/transcriptStore.js @@ -6,7 +6,7 @@ import BufferLineTransform from '@agoric/internal/src/node/buffer-line-transform import { createSHA256 } from './hasher.js'; /** - * @typedef { import('./swingStore').SwingStoreExporter } SwingStoreExporter + * @typedef { import('./exporter').SwingStoreExporter } SwingStoreExporter * * @typedef {{ * initTranscript: (vatID: string) => void, diff --git a/packages/swing-store/src/types.d.ts b/packages/swing-store/src/types.d.ts new file mode 100644 index 00000000000..b1697d3d323 --- /dev/null +++ b/packages/swing-store/src/types.d.ts @@ -0,0 +1,14 @@ +export type { + SwingStore, + SwingStoreKernelStorage, + SwingStoreHostStorage, +} from './src/swingStore.js'; +export type { KVStore } from './src/kvStore.js'; +export type { BundleStore } from './src/bundleStore.js'; +export type { + SnapStore, + SnapshotResult, + SnapshotInfo, +} from './src/snapStore.js'; +export type { TranscriptStore } from './src/transcriptStore.js'; +export type { SwingStoreExporter, ExportMode } from './src/exporter.js'; diff --git a/packages/swing-store/src/types.js b/packages/swing-store/src/types.js new file mode 100644 index 00000000000..a863ae2d2fa --- /dev/null +++ b/packages/swing-store/src/types.js @@ -0,0 +1,6 @@ +// Types for the public API + +// Everything this "exports" actually comes from the neighboring types.d.ts file + +// Ensure this is a module. 
+export {}; diff --git a/packages/swing-store/src/util.js b/packages/swing-store/src/util.js index 4d4c765288e..f22f974d325 100644 --- a/packages/swing-store/src/util.js +++ b/packages/swing-store/src/util.js @@ -1,3 +1,4 @@ +import path from 'path'; import { Buffer } from 'buffer'; /** @@ -5,7 +6,7 @@ import { Buffer } from 'buffer'; * 'stream/consumers' package, which unfortunately only exists in newer versions * of Node. * - * @param {import('./swingStore').AnyIterable} inStream + * @param {import('./exporter').AnyIterable} inStream */ export const buffer = async inStream => { const chunks = []; @@ -14,3 +15,8 @@ export const buffer = async inStream => { } return Buffer.concat(chunks); }; + +export function dbFileInDirectory(dirPath) { + const filePath = path.resolve(dirPath, 'swingstore.sqlite'); + return filePath; +} diff --git a/packages/swing-store/test/test-bundles.js b/packages/swing-store/test/test-bundles.js index c622583210b..c62d3f36feb 100644 --- a/packages/swing-store/test/test-bundles.js +++ b/packages/swing-store/test/test-bundles.js @@ -8,7 +8,7 @@ import { importSwingStore, initSwingStore, makeSwingStoreExporter, -} from '../src/swingStore.js'; +} from '../src/index.js'; import { buffer } from '../src/util.js'; function makeB0ID(bundle) { diff --git a/packages/swing-store/test/test-exportImport.js b/packages/swing-store/test/test-exportImport.js index 852526582f7..8c30bfe19dc 100644 --- a/packages/swing-store/test/test-exportImport.js +++ b/packages/swing-store/test/test-exportImport.js @@ -9,11 +9,9 @@ import test from 'ava'; import tmp from 'tmp'; import bundleSource from '@endo/bundle-source'; -import { - initSwingStore, - makeSwingStoreExporter, - importSwingStore, -} from '../src/swingStore.js'; +import { initSwingStore } from '../src/swingStore.js'; +import { makeSwingStoreExporter } from '../src/exporter.js'; +import { importSwingStore } from '../src/importer.js'; function makeExportLog() { const exportLog = []; From 
0a2ba8aa66800478738131ac2ff237e6ab22045e Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Tue, 25 Jul 2023 21:10:44 -0600 Subject: [PATCH 075/109] feat(upgrade-test): plumb and use `UPGRADE_INFO` --- packages/deployment/upgrade-test/Dockerfile | 30 ++++++++++++++----- packages/deployment/upgrade-test/Makefile | 21 ++++++++----- packages/deployment/upgrade-test/Readme.md | 10 +++++++ .../agoric-upgrade-10-to-11/.keep | 0 .../upgrade-test-scripts/start_ag0.sh | 13 +++++++- .../upgrade-test-scripts/start_to_to.sh | 13 +++++++- 6 files changed, 71 insertions(+), 16 deletions(-) create mode 100644 packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10-to-11/.keep diff --git a/packages/deployment/upgrade-test/Dockerfile b/packages/deployment/upgrade-test/Dockerfile index 34d01cabd0f..d57e2402322 100644 --- a/packages/deployment/upgrade-test/Dockerfile +++ b/packages/deployment/upgrade-test/Dockerfile @@ -3,7 +3,8 @@ ARG BOOTSTRAP_MODE=main # on agoric-uprade-7-2, with upgrade to agoric-upgrade-8 FROM ghcr.io/agoric/ag0:agoric-upgrade-7-2 as agoric-upgrade-7-2 ARG BOOTSTRAP_MODE -ENV UPGRADE_TO=agoric-upgrade-8 THIS_NAME=agoric-upgrade-7-2 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} +ARG UPGRADE_INFO_8 +ENV UPGRADE_TO=agoric-upgrade-8 UPGRADE_INFO=${UPGRADE_INFO_8} THIS_NAME=agoric-upgrade-7-2 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} RUN echo "${BOOTSTRAP_MODE}" RUN mkdir -p /usr/src/agoric-sdk/upgrade-test-scripts WORKDIR /usr/src/agoric-sdk/ @@ -28,8 +29,8 @@ RUN . 
./upgrade-test-scripts/start_to_to.sh ARG DEST_IMAGE #this is agoric-upgrade-8-1 aka pismoB FROM ghcr.io/agoric/agoric-sdk:30 as agoric-upgrade-8-1 -ARG BOOTSTRAP_MODE -ENV THIS_NAME=agoric-upgrade-8-1 UPGRADE_TO=agoric-upgrade-9 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} +ARG BOOTSTRAP_MODE UPGRADE_INFO_9 +ENV THIS_NAME=agoric-upgrade-8-1 UPGRADE_TO=agoric-upgrade-9 UPGRADE_INFO=${UPGRADE_INFO_9} BOOTSTRAP_MODE=${BOOTSTRAP_MODE} WORKDIR /usr/src/agoric-sdk/ COPY ./bash_entrypoint.sh ./env_setup.sh ./start_to_to.sh ./upgrade-test-scripts/ @@ -42,8 +43,8 @@ RUN . ./upgrade-test-scripts/start_to_to.sh ARG DEST_IMAGE # this is agoric-upgrade-9 / pismoC with upgrade to agoric-upgrade-10 FROM ghcr.io/agoric/agoric-sdk:31 as agoric-upgrade-9 -ARG BOOTSTRAP_MODE -ENV THIS_NAME=agoric-upgrade-9 UPGRADE_TO=agoric-upgrade-10 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} +ARG BOOTSTRAP_MODE UPGRADE_INFO_10 +ENV THIS_NAME=agoric-upgrade-9 UPGRADE_TO=agoric-upgrade-10 UPGRADE_INFO=${UPGRADE_INFO_10} BOOTSTRAP_MODE=${BOOTSTRAP_MODE} WORKDIR /usr/src/agoric-sdk/ COPY ./bash_entrypoint.sh ./env_setup.sh ./start_to_to.sh ./upgrade-test-scripts/ @@ -59,7 +60,7 @@ ARG DEST_IMAGE #this is agoric-upgrade-10 / vaults FROM ghcr.io/agoric/agoric-sdk:35 as agoric-upgrade-10 ARG BOOTSTRAP_MODE -ENV THIS_NAME=agoric-upgrade-10 UPGRADE_TO=agoric-upgrade-11 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} +ENV THIS_NAME=agoric-upgrade-10 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} WORKDIR /usr/src/agoric-sdk/ COPY ./bash_entrypoint.sh ./env_setup.sh ./start_to_to.sh ./upgrade-test-scripts/ @@ -69,6 +70,21 @@ RUN chmod +x ./upgrade-test-scripts/*.sh SHELL ["/bin/bash", "-c"] RUN . 
./upgrade-test-scripts/start_to_to.sh +ARG DEST_IMAGE +#this is agoric-upgrade-10 upgrading to 11 +#it's a separate target because agoric-upgrade-10 takes so long to test +FROM ghcr.io/agoric/agoric-sdk:34 as agoric-upgrade-10-to-11 +ARG BOOTSTRAP_MODE UPGRADE_INFO_11 +ENV THIS_NAME=agoric-upgrade-10-to-11 UPGRADE_TO=agoric-upgrade-11 UPGRADE_INFO=${UPGRADE_INFO_11} BOOTSTRAP_MODE=${BOOTSTRAP_MODE} + +WORKDIR /usr/src/agoric-sdk/ +COPY ./bash_entrypoint.sh ./env_setup.sh ./start_to_to.sh ./upgrade-test-scripts/ +COPY ./${THIS_NAME} ./upgrade-test-scripts/${THIS_NAME}/ +COPY --from=agoric-upgrade-10 /root/.agoric /root/.agoric +RUN chmod +x ./upgrade-test-scripts/*.sh +SHELL ["/bin/bash", "-c"] +RUN . ./upgrade-test-scripts/start_to_to.sh + ARG DEST_IMAGE #this is agoric-upgrade-11 / vaults+1 FROM ${DEST_IMAGE} as agoric-upgrade-11 @@ -79,7 +95,7 @@ ENV THIS_NAME=agoric-upgrade-11 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} WORKDIR /usr/src/agoric-sdk/ COPY ./bash_entrypoint.sh ./env_setup.sh ./start_to_to.sh ./upgrade-test-scripts/ COPY ./${THIS_NAME} ./upgrade-test-scripts/${THIS_NAME}/ -COPY --from=agoric-upgrade-10 /root/.agoric /root/.agoric +COPY --from=agoric-upgrade-10-to-11 /root/.agoric /root/.agoric RUN apt install -y tmux SHELL ["/bin/bash", "-c"] RUN chmod +x ./upgrade-test-scripts/*.sh diff --git a/packages/deployment/upgrade-test/Makefile b/packages/deployment/upgrade-test/Makefile index f5a55e66ba0..cd9c24bca50 100644 --- a/packages/deployment/upgrade-test/Makefile +++ b/packages/deployment/upgrade-test/Makefile @@ -14,23 +14,30 @@ endif local_sdk: (cd ../ && make docker-build-sdk) +BUILD = docker build --progress=plain $(BUILD_OPTS) \ + --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --build-arg DEST_IMAGE=$(DEST_IMAGE) \ + -f Dockerfile upgrade-test-scripts + agoric-upgrade-7-2: - docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --build-arg DEST_IMAGE=$(DEST_IMAGE) --progress=plain --target agoric-upgrade-7-2 -t $(REPOSITORY):agoric-upgrade-7-2 -f 
Dockerfile upgrade-test-scripts + $(BUILD) --target agoric-upgrade-7-2 -t $(REPOSITORY):agoric-upgrade-7-2 agoric-upgrade-8: agoric-upgrade-7-2 - docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --build-arg DEST_IMAGE=$(DEST_IMAGE) --progress=plain --target agoric-upgrade-8 -t $(REPOSITORY):agoric-upgrade-8 -f Dockerfile upgrade-test-scripts + $(BUILD) --target agoric-upgrade-8 -t $(REPOSITORY):agoric-upgrade-8 agoric-upgrade-8-1: agoric-upgrade-8 - docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --build-arg DEST_IMAGE=$(DEST_IMAGE) --progress=plain --target agoric-upgrade-8-1 -t $(REPOSITORY):agoric-upgrade-8-1 -f Dockerfile upgrade-test-scripts + $(BUILD) --target agoric-upgrade-8-1 -t $(REPOSITORY):agoric-upgrade-8-1 agoric-upgrade-9: agoric-upgrade-8-1 - docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --build-arg DEST_IMAGE=$(DEST_IMAGE) --progress=plain --target agoric-upgrade-9 -t $(REPOSITORY):agoric-upgrade-9 -f Dockerfile upgrade-test-scripts + $(BUILD) --target agoric-upgrade-9 -t $(REPOSITORY):agoric-upgrade-9 agoric-upgrade-10: agoric-upgrade-9 - docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --build-arg DEST_IMAGE=$(DEST_IMAGE) --progress=plain --target agoric-upgrade-10 -t $(REPOSITORY):agoric-upgrade-10 -f Dockerfile upgrade-test-scripts + $(BUILD) --target agoric-upgrade-10 -t $(REPOSITORY):agoric-upgrade-10 + +agoric-upgrade-10-to-11: agoric-upgrade-10 + $(BUILD) --target agoric-upgrade-10-to-11 -t $(REPOSITORY):agoric-upgrade-10-to-11 -agoric-upgrade-11: agoric-upgrade-10 - docker build --build-arg BOOTSTRAP_MODE=$(BOOTSTRAP_MODE) --build-arg DEST_IMAGE=$(DEST_IMAGE) --progress=plain --target agoric-upgrade-11 -t $(REPOSITORY):agoric-upgrade-11 -f Dockerfile upgrade-test-scripts +agoric-upgrade-11: agoric-upgrade-10-to-11 + $(BUILD) --target agoric-upgrade-11 -t $(REPOSITORY):agoric-upgrade-11 # build main bootstrap build: $(TARGET) diff --git a/packages/deployment/upgrade-test/Readme.md 
b/packages/deployment/upgrade-test/Readme.md index 44e21de4841..3b3e233ecb4 100644 --- a/packages/deployment/upgrade-test/Readme.md +++ b/packages/deployment/upgrade-test/Readme.md @@ -77,6 +77,16 @@ docker ps docker attach sweet_edison ``` +**To pass specific `software-upgrade --upgrade-info`** + +```shell +json='{"some":"json","here":123}' +make build BUILD_OPTS="--build-arg UPGRADE_INFO_11='$json'" +``` + +Search this directory for `UPGRADE_INFO` if you want to see how it is plumbed +through. + **To test CLI** You can point your local CLI tools to the chain running in Docker. Our Docker config binds on the same port (26656) as running a local chain. So you can use the agoric-cli commands on the Docker chain the same way. But note that the Cosmos account keys will be different from in your dev keyring. diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10-to-11/.keep b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10-to-11/.keep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/start_ag0.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/start_ag0.sh index 251df7e4015..4281ae92318 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/start_ag0.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/start_ag0.sh @@ -65,7 +65,18 @@ if [[ "$BOOTSTRAP_MODE" == "test" ]]; then UPGRADE_TO=${UPGRADE_TO//agoric-/agorictest-} fi -ag0 tx gov submit-proposal software-upgrade "$UPGRADE_TO" --upgrade-height="$height" --title="Upgrade to ${UPGRADE_TO}" --description="upgrades" --from=validator --chain-id="$CHAINID" --yes --keyring-backend=test +info=${UPGRADE_INFO-"{}"} +if echo "$info" | jq .; then : +else + status=$? 
+ echo "Upgrade info is not valid JSON: $info" + exit $status +fi +ag0 tx gov submit-proposal software-upgrade "$UPGRADE_TO" \ + --upgrade-height="$height" --upgrade-info="$info" \ + --title="Upgrade to ${UPGRADE_TO}" --description="upgrades" \ + --from=validator --chain-id="$CHAINID" \ + --yes --keyring-backend=test waitForBlock voteLatestProposalAndWait diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/start_to_to.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/start_to_to.sh index d8f758b9ed8..2ba2d2a8e13 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/start_to_to.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/start_to_to.sh @@ -39,7 +39,18 @@ if [[ "$DEST" != "1" ]]; then voting_period_s=10 latest_height=$(agd status | jq -r .SyncInfo.latest_block_height) height=$(( $latest_height + $voting_period_s + 10 )) - agd tx gov submit-proposal software-upgrade "$UPGRADE_TO" --upgrade-height="$height" --title="Upgrade to ${UPGRADE_TO}" --description="upgrades" --from=validator --chain-id="$CHAINID" --yes --keyring-backend=test + info=${UPGRADE_INFO-"{}"} + if echo "$info" | jq .; then : + else + status=$? + echo "Upgrade info is not valid JSON: $info" + exit $status + fi + agd tx gov submit-proposal software-upgrade "$UPGRADE_TO" \ + --upgrade-height="$height" --upgrade-info="$info" \ + --title="Upgrade to ${UPGRADE_TO}" --description="upgrades" \ + --from=validator --chain-id="$CHAINID" \ + --yes --keyring-backend=test waitForBlock voteLatestProposalAndWait From 03e323a55ffeb98dcc84a57050a5d3fc881899b8 Mon Sep 17 00:00:00 2001 From: Brian Warner Date: Mon, 7 Aug 2023 21:44:19 -0700 Subject: [PATCH 076/109] fix: rewrite importSwingStore to preserve metadata properly Previously, the swingstore importer would ignore "historical metadata": records (with hashes) for transcripts and heap snapshots that are not strictly necessary to rebuild workers. 
This was a mistake: our intention was to always preserve these hashes, so that we might safely (with integrity) repopulate the corresponding data in the future, using artifacts from untrusted sources. This commit rewrites the importer to record *all* metadata records in the first pass, regardless of whether we want historical data or not. All of these records will be stubs: they contain hashes, but are missing the actual bundle or snapshot or transcript items, as if they had been pruned. Then, in the second pass, we populate those stubs using the matching artifacts (or ignore the historical ones, as configured by the `includeHistorical` option). A final `assertComplete` pass insists that all the important (non-historical) records are fully populated. The exporter was updated to omit empty artifacts. New tests were added to assert that metadata is preserved regardless of import mode, and that the `assertComplete` pass really catches everything. Also, we check that an import throws if given a mis-sized artifact, like a transcript span that is missing a few items. A new `docs/swingstore.md` was added to describe the data model, including what it means for records to be pruned, and `docs/data-export.md` was updated. Note: this commit changes the schema of the `snapshots` table (to support temporarily-unpopulated `inUse = 1` snapshot data). To be precise, any swing-store created by this version (either via `initSwingStore` or `importSwingStore`) will get the new schema: pre-existing DBs opened with `openSwingStore` will continue to use the old/strict schema. This is fine for now, but as the comments in snapStore.js explain, we'll need to implement DB schema versioning and upgrade (https://github.com/Agoric/agoric-sdk/issues/8089) before we can safely change any non-`importSwingStore` code to create unpopulated `inUse=1` records. 
fixes #8025 --- packages/swing-store/docs/data-export.md | 24 +- packages/swing-store/docs/swingstore.md | 52 ++ packages/swing-store/src/assertComplete.js | 22 + packages/swing-store/src/bundleStore.js | 162 ++++-- packages/swing-store/src/exporter.js | 28 +- packages/swing-store/src/importer.js | 235 +++------ packages/swing-store/src/snapStore.js | 162 ++++-- packages/swing-store/src/transcriptStore.js | 170 +++++-- packages/swing-store/test/test-bundles.js | 16 +- packages/swing-store/test/test-export.js | 308 ++++++++++++ .../swing-store/test/test-exportImport.js | 82 ++- packages/swing-store/test/test-import.js | 476 ++++++++++++++++++ packages/swing-store/test/util.js | 26 + 13 files changed, 1416 insertions(+), 347 deletions(-) create mode 100644 packages/swing-store/docs/swingstore.md create mode 100644 packages/swing-store/src/assertComplete.js create mode 100644 packages/swing-store/test/test-export.js create mode 100644 packages/swing-store/test/test-import.js create mode 100644 packages/swing-store/test/util.js diff --git a/packages/swing-store/docs/data-export.md b/packages/swing-store/docs/data-export.md index c538448fa21..2eae92bec7b 100644 --- a/packages/swing-store/docs/data-export.md +++ b/packages/swing-store/docs/data-export.md @@ -16,7 +16,7 @@ The SwingStore export protocol defines two stages (effectively two datasets). Th Each time a SwingStore API is used to modify the state somehow (e.g. adding/changing/deleting a `kvStore` entry, or pushing a new item on to a transcript), the contents of both datasets may change. New first-stage entries can be created, existing ones may be modified or deleted. And the set of second-stage artifacts may change. -These export data/artifact changes can happen when calling into the kernel (e.g. invoking the external API of a device, causing the device code to change its own state or push messages onto the run-queue), or by normal kernel operations as it runs (any time `controller.run()` is executing). 
When the kernel is idle (after `controller.run()` has completed), the kernel will not make any changes to the SwingStore, and both datasets will be stable. +These export data/artifact changes can happen when calling into the kernel (e.g. invoking the external API of a device, causing the device code to change its own state or push messages onto the run-queue), or by normal kernel operations as it runs (any time `controller.run()` is executing). When the kernel is idle (after `controller.run()` has completed), and `hostStorage.commit()` is called, the kernel will not make any changes to the SwingStore, and both datasets will be stable. Among other things, the SwingStore records a transcript of deliveries for each vat. The collection of all deliveries to a particular vat since its last heap snapshot was written is called the "current span". For each vat, the first-stage export data will record a single record that remembers the extent and the hash of the current span. This record then refers to a second-stage export artifact that contains the actual transcript contents. @@ -83,6 +83,8 @@ So, to include SwingStore data in this state-sync snapshot, we need a way to get To support this, SwingStore has an "incremental export" mode. This is activated when the host application supplies an "export callback" option to the SwingStore instance constructor. Instead of retrieving the entire first-stage export data at the end of the block, the host application will be continuously notified about changes to this data as the kernel executes. The host application can then incorporate those entries into an existing hashed Merkle tree (e.g. the cosmos-sdk IAVL tree), whose root hash is included in the consensus block hash. Every time the callback is given `(key, value)`, the host should add a new (or modify some existing) IAVL entry, using an IAVL key within some range dedicated to the SwingStore first-stage export data. 
When the callback receives `(key, undefined)` or `(key, null)`, it should delete the entry. In this way, the IAVL tree maintains a "shadow copy" of the first-stage export data at all times, making the contents both covered by the consensus hash, and automatically included in the cosmos-sdk IAVL tree where it will become available to the new validator as it begins to reconstruct the SwingStore. +The export callback must be established from the very beginning, so it includes all changes made during kernel initialization. + All validator nodes use this export callback, even if they never perform the rest of the export process, to ensure that the consensus state includes the entire first-stage dataset. (Note that the first stage data is generally smaller than the full dataset, making this relatively inexpensive). Then, on the few occasions when the application needs to build a full state-sync snapshot, it can ask the SwingStore (after block commit) for the full set of artifacts that match the most recent commit. @@ -177,18 +179,32 @@ As a result, for each active vat, the first-stage Export Data contains a record The `openSwingStore()` function has an option named `keepTranscripts` (which defaults to `true`), which causes the transcriptStore to retain the old transcript items. A second option named `keepSnapshots` (which defaults to `false`) causes the snapStore to retain the old heap snapshots. Opening the swingStore with a `false` option does not necessarily delete the old items immediately, but they'll probably get deleted the next time the kernel triggers a heap snapshot or transcript-span rollover. Validators who care about minimizing their disk usage will want to set both to `false`. In the future, we will arrange the SwingStore SQLite tables to provide easy `sqlite3` CLI commands that will delete the old data, so validators can also periodically use the CLI command to prune it. -The `getArtifactNames()` API includes an option named `includeHistorical`. 
If `true`, all available historical artifacts will be included in the export (limited by what the `openSwingStore` options have deleted). If `false`, none will be included. Note that the "export data" is necessarily unaffected: if we *ever* want to validate this optional data, the hashes are mandatory. But the `getArtifactNames()` list will be smaller if you set `includeHistorical = false`. Also, re-exporting from a pruned copy will lack the old data, even if the re-export uses `includeHistorical = true`, because the second SwingStore cannot magically reconstruct the missing data. +When exporting, the `makeSwingStoreExporter()` function takes an `exportMode=` argument. This serves to limit the set of artifacts that will be provided in the export. The defined values of `exportMode` are: +* `current`: include only the current transcript span and current snapshot for each vat: just the minimum set necessary for current operations +* `archival`: include all available transcript spans +* `debug`: include all available transcript spans *and* all available snapshots. The old snapshots are never necessary for normal operations, nor are they likely to be useful for extreme upgrade scenarios, but they might be useful for some unusual debugging operation + +Note that `exportMode` does not affect the Export Data generated by the exporter (if we *ever* want to validate this optional data, the hashes are mandatory). It only affects the names returned by `getArtifactNames()`: the list will be smaller for `current` than for `archival`. Re-exporting from a pruned copy will lack the old data, even if the re-export uses `archival`, because the second SwingStore cannot magically reconstruct the missing data. Note that when a vat is terminated, we delete all information about it, including transcript items and snapshots, both current and old. This will remove all the Export Data records, and well as the matching artifacts from `getArtifactNames`.
+When importing, the `importSwingStore()` function takes an options bag, which has a property named `includeHistorical`. This property defaults to `false`, which makes the importer ignore any historical artifacts present in the export dataset. To import the historical transcript spans (and snapshots), you must set it to `true`. + +So, to convey historical transcript spans from one swingstore to another, you must set three options along the way: + +* the original swingstore must be opened with `{ includeHistorical: true }`, otherwise the old spans will be pruned immediately +* the export must use `makeSwingStoreExporter(dirpath, 'archival')`, otherwise the export will omit the old spans +* the import must use `importSwingStore(exporter, dirPath, { includeHistorical: true })`, otherwise the import will ignore the old spans + ## Implementation Details -SwingStore contains components to accomodate all the various kinds of state that the SwingSet kernel needs to store. This currently consists of three portions: +SwingStore contains components to accommodate all the various kinds of state that the SwingSet kernel needs to store. This currently consists of four portions: * `kvStore`, a general-purpose string/string key-value table * `transcriptStore`: append-only vat deliveries, broken into "spans", delimited by heap snapshot events * `snapshotStore`: binary blobs containing JS engine heap state, to limit transcript replay depth +* `bundleStore`: code bundles that can be imported with `@endo/import-bundle` -Currently, the SwingStore treats transcript spans and heap snapshots as export artifacts, with hashes recorded in the export data for validation (and to remember exactly which artifacts are necessary). The `kvStore` is copied one-to-one into the export data (i.e. we keep a full shadow copy in IAVL), because that is the fastest way to ensure the `kvStore` data is fully available and validated.
+Currently, the SwingStore treats transcript spans, heap snapshots, and bundles as export artifacts, with hashes recorded in the export data for validation (and to remember exactly which artifacts are necessary). The `kvStore` is copied one-to-one into the export data (i.e. we keep a full shadow copy in IAVL), because that is the fastest way to ensure the `kvStore` data is fully available and validated. If some day we implement an IAVL-like Merkle tree inside SwingStore, and use it to automatically generate a root hash for the `kvStore` at the end of each block, we will replace this (large) shadow copy with a single `kvStoreRootHash` entry, and add a new export artifact to contain the full contents of the kvStore. This reduce the size of the IAVL tree, as well as the rate of IAVL updates during block execution, at the cost of increased CPU and complexity within SwingStore. diff --git a/packages/swing-store/docs/swingstore.md b/packages/swing-store/docs/swingstore.md new file mode 100644 index 00000000000..56bd174dd28 --- /dev/null +++ b/packages/swing-store/docs/swingstore.md @@ -0,0 +1,52 @@ +# SwingStore Data Model + +The "SwingStore" provides a database to hold SwingSet kernel state, with an API crafted to help both the kernel and the host application mutate, commit, export, and import this state. + +The state is broken up into several pieces, or "stores": + +* `bundleStore`: a string-keyed Bundle-value table, holding source bundles which can be evaluated by `importBundle` to create vats, or new Compartments within a vat +* `transcriptStore`: records a linear sequence of deliveries and syscalls (with results), collectively known as "transcript entries", for each vat +* `snapStore`: records one or more XS heap snapshots for each vat, to rebuild a worker more efficiently than replaying all transcript entries from the beginning +* `kvStore`: a string-keyed string-valued table, which holds everything else. 
Currently, this holds each vat's c-list and vatstore data, as well as the kernel-wide object and promise tables, and run-queues. + +## Incarnations, Spans, Snapshots + +The kernel tracks the state of one or more vats. Each vat's execution is split into "incarnations", which are separated by a "vat upgrade" (a call to `E(vatAdminFacet).upgrade(newBundleCap, options)`, see https://github.com/Agoric/agoric-sdk/blob/master/packages/SwingSet/docs/vat-upgrade.md for details). Each incarnation gets a new worker, which erases the heap state and only retains durable vatstore data across the upgrade. Every active vat has a "current incarnation", and zero or more "historic incarnations". Only the current incarnation is instantiated. + +Within each incarnation, execution is broken into one or more "spans", with a "current span" and zero or more "historic spans". This breaks up the transcript into corresponding spans. + +Each historic span ends with a `save-snapshot` entry which records the creation and saving of an XS heap snapshot. The initial span starts with a `start-worker` entry, while all non-initial spans start with a `load-snapshot` entry. The final span of historic incarnations each end with a `shutdown-worker` entry. + +Each `save-snapshot` entry adds a new snapshot to the `snapStore`, so each vat has zero or more snapshots, of which the last one is called the "current" or "in-use" snapshot, and the earlier ones are called "historical snapshots". + +(note: the `deliveryNum` counter is scoped to the vat and does not reset at incarnation or span boundaries) + +## Artifacts + +The import/export process (using `makeSwingStoreExporter` and `importSwingStore`) defines some number of "artifacts" to contain much of the SwingStore data. Each bundle is a separate artifact, as is each heap snapshot. Each transcript span is a separate artifact (an aggregate of the individual transcript entries comprising that span). 
+ +During export, the `getArtifactNames()` method provides a list of all available artifacts, while `getArtifact(name)` is used to retrieve the actual data. The import function processes each artifact separately. + +## Populated vs Pruned + +For normal operation, the kernel does not require historical incarnations, spans, or snapshots. It only needs the ability to reconstruct a worker for the current incarnation of each vat, which means loading the current snapshot (if any), and replaying the contents of the current transcript span. + +For this reason, the swingstore must always contain the current transcript span, and the current snapshot (if any), for every vat. + +However, to save space, historical spans/snapshots might be pruned, by deleting their contents from the database (but retaining the metadata, which includes a hash of the contents for later validation). Historical snapshots are pruned by default (unless `openSwingStore()` is given an options bag with `keepSnapshots: true`). Historical spans are not currently pruned (the `keepTranscripts` option defaults to `true`), but that may change. + +In addition, `importSwingStore()` can be used to create a SwingStore from data exported out of some other SwingStore. The export-then-import process might result in a pruned DB in one of three ways: + +* the import-time options might instruct the import process to ignore some of the available data +* the export-time options might have done the same +* the original DB was itself already pruned, so the data was not available in the first place + +In the future, a separate SwingStore API will exist to allow previously-pruned artifacts to be repopulated. Every artifact has a metadata record which *is* included in the export (in the `exportData` section, but separate from the kvStore shadow table entries, see [data-export.md](./data-export.md)), regardless of pruning modes, to ensure that this API can check the integrity of these repopulated artifacts. 
This reduces the reliance set and trust burden of the repopulation process (we can safely use untrusted artifact providers). + +When a snapshot is pruned, the `snapshots` SQL table row is modified, replacing its `compressedSnapshot` BLOB with a NULL. The other columns are left alone, especially the `hash` column, which retains the integrity-checking metadata to support a future repopulation. + +When a transcript span is pruned, the `transcriptSpans` row is left alone, but the collection of `transcriptItems` rows are deleted. Any span for which all the `transcriptItems` rows are present is said to be "populated", while any span that is missing one or more `transcriptItems` rows is said to be "pruned". (There is no good reason for a span to be only partially pruned, but until we compress historical spans into a single row, in some new table, there remains the possibility of partial pruning). + +During import, we create the metadata first (as the export-data is parsed), then later, we fill in the details as the artifacts are read. + +Bundles are never pruned, however during import, the `bundles` table will temporarily contain rows whose `bundle` BLOB is NULL. 
diff --git a/packages/swing-store/src/assertComplete.js b/packages/swing-store/src/assertComplete.js new file mode 100644 index 00000000000..62b09465ea6 --- /dev/null +++ b/packages/swing-store/src/assertComplete.js @@ -0,0 +1,22 @@ +/** + * @param {import('./internal.js').SwingStoreInternal} internal + * @param {'operational'} level + * @returns {void} + */ +export function assertComplete(internal, level) { + assert.equal(level, 'operational'); // only option for now + // every bundle must be populated + internal.bundleStore.assertComplete(level); + + // every 'isCurrent' transcript span must have all items + // TODO: every vat with any data must have a isCurrent transcript + // span + internal.transcriptStore.assertComplete(level); + + // every 'inUse' snapshot must be populated + internal.snapStore.assertComplete(level); + + // TODO: every isCurrent span that starts with load-snapshot has a + // matching snapshot (counter-argument: swing-store should not know + // those details about transcript entries) +} diff --git a/packages/swing-store/src/bundleStore.js b/packages/swing-store/src/bundleStore.js index 54076710821..de7d9b3471b 100644 --- a/packages/swing-store/src/bundleStore.js +++ b/packages/swing-store/src/bundleStore.js @@ -7,7 +7,6 @@ import { checkBundle } from '@endo/check-bundle/lite.js'; import { Nat } from '@endo/nat'; import { Fail, q } from '@agoric/assert'; import { createSHA256 } from './hasher.js'; -import { buffer } from './util.js'; /** * @typedef { { moduleFormat: 'getExport', source: string, sourceMap?: string } } GetExportBundle @@ -27,7 +26,9 @@ import { buffer } from './util.js'; * * @typedef {{ * exportBundle: (name: string) => AsyncIterableIterator, - * importBundle: (artifactName: string, exporter: SwingStoreExporter, bundleID: string) => void, + * importBundleRecord: (key: string, value: string) => void, + * importBundle: (name: string, dataProvider: () => Promise) => Promise, + * assertComplete: (level: 'operational') => void, * 
getExportRecords: () => IterableIterator, * getArtifactNames: () => AsyncIterableIterator, * getBundleIDs: () => IterableIterator, @@ -39,6 +40,18 @@ import { buffer } from './util.js'; * */ +function bundleIDFromName(name) { + typeof name === 'string' || Fail`artifact name must be a string`; + const [tag, ...pieces] = name.split('.'); + if (tag !== 'bundle' || pieces.length !== 1) { + Fail`expected artifact name of the form 'bundle.{bundleID}', saw ${q( + name, + )}`; + } + const bundleID = pieces[0]; + return bundleID; +} + /** * @param {*} db * @param {() => void} ensureTxn @@ -54,6 +67,9 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { ) `); + // A populated record contains both bundleID and bundle, while a + // pruned record has a bundle of NULL. + function bundleArtifactName(bundleID) { return `bundle.${bundleID}`; } @@ -62,20 +78,36 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { return `b${Nat(version)}-${hash}`; } - const sqlAddBundle = db.prepare(` - INSERT OR REPLACE INTO bundles - (bundleID, bundle) - VALUES (?, ?) + // the PRIMARY KEY constraint requires the bundleID not already + // exist + const sqlAddBundleRecord = db.prepare(` + INSERT INTO bundles (bundleID, bundle) VALUES (?, NULL) `); - /** - * Store a bundle. Here the bundle itself is presumed valid. - * - * @param {string} bundleID - * @param {Bundle} bundle - */ - function addBundle(bundleID, bundle) { + // this sees both populated and pruned (not-yet-populated) records + const sqlHasBundleRecord = db.prepare(` + SELECT count(*) + FROM bundles + WHERE bundleID = ? 
+ `); + sqlHasBundleRecord.pluck(); + + const sqlPopulateBundleRecord = db.prepare(` + UPDATE bundles SET bundle = $serialized WHERE bundleID = $bundleID + `); + + function addBundleRecord(bundleID) { + ensureTxn(); + sqlAddBundleRecord.run(bundleID); + } + + function populateBundle(bundleID, serialized) { ensureTxn(); + sqlHasBundleRecord.get(bundleID) || Fail`missing ${bundleID}`; + sqlPopulateBundleRecord.run({ bundleID, serialized }); + } + + function serializeBundle(bundleID, bundle) { const { moduleFormat } = bundle; let serialized; if (bundleID.startsWith('b0-')) { @@ -98,19 +130,55 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { } else { throw Fail`unsupported BundleID ${bundleID}`; } - sqlAddBundle.run(bundleID, serialized); + return serialized; + } + + /** + * Store a complete bundle in a single operation, used by runtime + * (i.e. not an import). We rely upon the caller to provide a + * correct bundle (e.g. no unexpected properties), but we still + * check the ID against the contents. + * + * @param {string} bundleID + * @param {Bundle} bundle + */ + function addBundle(bundleID, bundle) { + const serialized = serializeBundle(bundleID, bundle); + addBundleRecord(bundleID); + populateBundle(bundleID, serialized); noteExport(bundleArtifactName(bundleID), bundleID); } - const sqlHasBundle = db.prepare(` + const sqlGetPrunedBundles = db.prepare(` + SELECT bundleID + FROM bundles + WHERE bundle IS NULL + ORDER BY bundleID + `); + sqlGetPrunedBundles.pluck(); + + function getPrunedBundles() { + return sqlGetPrunedBundles.all(); + } + + function assertComplete(level) { + assert.equal(level, 'operational'); // for now + const pruned = getPrunedBundles(); + if (pruned.length) { + throw Fail`missing bundles for: ${pruned.join(',')}`; + } + } + + const sqlHasPopulatedBundle = db.prepare(` SELECT count(*) FROM bundles WHERE bundleID = ? 
+ AND bundle IS NOT NULL `); - sqlHasBundle.pluck(true); + sqlHasPopulatedBundle.pluck(true); function hasBundle(bundleID) { - const count = sqlHasBundle.get(bundleID); + const count = sqlHasPopulatedBundle.get(bundleID); return count !== 0; } @@ -119,15 +187,15 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { FROM bundles WHERE bundleID = ? `); - sqlGetBundle.pluck(true); /** * @param {string} bundleID * @returns {Bundle} */ function getBundle(bundleID) { - const rawBundle = sqlGetBundle.get(bundleID); - rawBundle || Fail`bundle ${q(bundleID)} not found`; + const row = + sqlGetBundle.get(bundleID) || Fail`bundle ${q(bundleID)} not found`; + const rawBundle = row.bundle || Fail`bundle ${q(bundleID)} pruned`; if (bundleID.startsWith('b0-')) { return harden(JSON.parse(rawBundle)); } else if (bundleID.startsWith('b1-')) { @@ -153,6 +221,14 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { } } + // take an export-data record (id/hash but not bundle contents) and + // insert something in the DB + function importBundleRecord(key, value) { + const bundleID = bundleIDFromName(key); + assert.equal(bundleID, value); + addBundleRecord(bundleID); + } + /** * Read a bundle and return it as a stream of data suitable for export to * another store. 
@@ -166,14 +242,10 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { * @returns {AsyncIterableIterator} */ async function* exportBundle(name) { - typeof name === 'string' || Fail`artifact name must be a string`; - const parts = name.split('.'); - const [type, bundleID] = parts; - // prettier-ignore - (parts.length === 2 && type === 'bundle') || - Fail`expected artifact name of the form 'bundle.{bundleID}', saw ${q(name)}`; - const rawBundle = sqlGetBundle.get(bundleID); - rawBundle || Fail`bundle ${q(name)} not available`; + const bundleID = bundleIDFromName(name); + const row = + sqlGetBundle.get(bundleID) || Fail`bundle ${q(bundleID)} not found`; + const rawBundle = row.bundle || Fail`bundle ${q(bundleID)} pruned`; yield* Readable.from(Buffer.from(rawBundle)); } @@ -209,23 +281,17 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { } /** - * @param {string} name Artifact name of the bundle - * @param {SwingStoreExporter} exporter Whence to get the bits - * @param {string} bundleID Bundle ID of the bundle + * Call addBundleRecord() first, then this importBundle() will + * populate the record. 
+ * + * @param {string} name Artifact name, `bundle.${bundleID}` + * @param {() => Promise} dataProvider Function to get bundle bytes * @returns {Promise} */ - async function importBundle(name, exporter, bundleID) { + async function importBundle(name, dataProvider) { await 0; // no synchronous prefix - const parts = name.split('.'); - const [type, bundleIDkey] = parts; - // prettier-ignore - parts.length === 2 && type === 'bundle' || - Fail`expected artifact name of the form 'bundle.{bundleID}', saw '${q(name)}'`; - bundleIDkey === bundleID || - Fail`bundle artifact name ${name} doesn't match bundleID ${bundleID}`; - const artifactChunks = exporter.getArtifact(name); - const inStream = Readable.from(artifactChunks); - const data = await buffer(inStream); + const bundleID = bundleIDFromName(name); + const data = await dataProvider(); if (bundleID.startsWith('b0-')) { // we dissect and reassemble the bundle, to exclude unexpected properties const { moduleFormat, source, sourceMap } = JSON.parse(data.toString()); @@ -234,7 +300,7 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { const serialized = JSON.stringify(bundle); bundleID === bundleIdFromHash(0, createSHA256(serialized).finish()) || Fail`bundleID ${q(bundleID)} does not match bundle artifact`; - addBundle(bundleID, bundle); + populateBundle(bundleID, serialized); } else if (bundleID.startsWith('b1-')) { /** @type {EndoZipBase64Bundle} */ const bundle = harden({ @@ -245,7 +311,7 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { // Assert that the bundle contents match the ID and hash // eslint-disable-next-line @jessie.js/no-nested-await await checkBundle(bundle, computeSha512, bundleID); - addBundle(bundleID, bundle); + populateBundle(bundleID, serializeBundle(bundleID, bundle)); } else { Fail`unsupported BundleID ${q(bundleID)}`; } @@ -265,7 +331,7 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { const dump = {}; for (const row of 
sql.iterate()) { const { bundleID, bundle } = row; - dump[bundleID] = encodeBase64(bundle); + dump[bundleID] = encodeBase64(Buffer.from(bundle, 'utf-8')); } return dump; } @@ -282,14 +348,18 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { } return harden({ + importBundleRecord, + importBundle, + assertComplete, + addBundle, hasBundle, getBundle, deleteBundle, + getExportRecords, getArtifactNames, exportBundle, - importBundle, getBundleIDs, dumpBundles, diff --git a/packages/swing-store/src/exporter.js b/packages/swing-store/src/exporter.js index 80f1c030f86..47489454547 100644 --- a/packages/swing-store/src/exporter.js +++ b/packages/swing-store/src/exporter.js @@ -44,26 +44,29 @@ import { makeTranscriptStore } from './transcriptStore.js'; * * Content of validation data (with supporting entries for indexing): * - kv.${key} = ${value} // ordinary kvStore data entry - * - snapshot.${vatID}.${snapPos} = ${{ vatID, snapPos, hash }); + * - snapshot.${vatID}.${snapPos} = ${{ vatID, snapPos, hash }}; * - snapshot.${vatID}.current = `snapshot.${vatID}.${snapPos}` * - transcript.${vatID}.${startPos} = ${{ vatID, startPos, endPos, hash }} * - transcript.${vatID}.current = ${{ vatID, startPos, endPos, hash }} * * @property {() => AnyIterableIterator} getArtifactNames * - * Get a list of name of artifacts available from the swingStore. A name returned - * by this method guarantees that a call to `getArtifact` on the same exporter - * instance will succeed. Options control the filtering of the artifact names + * Get a list of name of artifacts available from the swingStore. A name + * returned by this method guarantees that a call to `getArtifact` on the same + * exporter instance will succeed. The `exportMode` option to + * `makeSwingStoreExporter` controls the filtering of the artifact names * yielded. 
* * Artifact names: * - transcript.${vatID}.${startPos}.${endPos} * - snapshot.${vatID}.${snapPos} + * - bundle.${bundleID} * * @property {(name: string) => AnyIterableIterator} getArtifact * - * Retrieve an artifact by name. May throw if the artifact is not available, - * which can occur if the artifact is historical and wasn't been preserved. + * Retrieve an artifact by name as a sequence of binary chunks. May throw if + * the artifact is not available, which can occur if the artifact is historical + * and wasn't preserved. * * @property {() => Promise} close * @@ -114,10 +117,9 @@ export function makeSwingStoreExporter(dirPath, exportMode = 'current') { * @yields {KVPair} */ async function* getExportData() { - const kvPairs = sqlGetAllKVData.iterate(); - for (const kv of kvPairs) { - if (getKeyType(kv.key) === 'consensus') { - yield [`kv.${kv.key}`, kv.value]; + for (const { key, value } of sqlGetAllKVData.iterate()) { + if (getKeyType(key) === 'consensus') { + yield [`kv.${key}`, value]; } } yield* snapStore.getExportRecords(true); @@ -144,13 +146,13 @@ export function makeSwingStoreExporter(dirPath, exportMode = 'current') { const [type] = name.split('.', 1); if (type === 'snapshot') { - return snapStore.exportSnapshot(name, exportHistoricalSnapshots); + return snapStore.exportSnapshot(name); } else if (type === 'transcript') { - return transcriptStore.exportSpan(name, exportHistoricalTranscripts); + return transcriptStore.exportSpan(name); } else if (type === 'bundle') { return bundleStore.exportBundle(name); } else { - throw Fail`invalid artifact type ${q(type)}`; + throw Fail`invalid type in artifact name ${q(name)}`; } } diff --git a/packages/swing-store/src/importer.js b/packages/swing-store/src/importer.js index e2eae80ab60..49cb854058a 100644 --- a/packages/swing-store/src/importer.js +++ b/packages/swing-store/src/importer.js @@ -1,204 +1,121 @@ import { Fail, q } from '@agoric/assert'; import { makeSwingStore } from './swingStore.js'; +import { buffer 
} from './util.js'; +import { assertComplete } from './assertComplete.js'; + +/** + * @typedef { object } ImportSwingStoreOptions + * @property { boolean } [includeHistorical] Should the importer pay attention to historical artifacts? + */ /** * Function used to create a new swingStore from an object implementing the * exporter API. The exporter API may be provided by a swingStore instance, or * implemented by a host to restore data that was previously exported. * - * @typedef { import('./exporter').SwingStoreExporter } SwingStoreExporter - * @typedef { import('./swingStore').SwingStore } SwingStore - * @typedef {(exporter: SwingStoreExporter) => Promise} ImportSwingStore - */ - -function parseVatArtifactExportKey(key) { - const parts = key.split('.'); - const [_type, vatID, rawPos] = parts; - // prettier-ignore - parts.length === 3 || - Fail`expected artifact name of the form '{type}.{vatID}.{pos}', saw ${q(key)}`; - const isCurrent = rawPos === 'current'; - let pos; - if (isCurrent) { - pos = -1; - } else { - pos = Number(rawPos); - } - - return { vatID, isCurrent, pos }; -} - -function artifactKey(type, vatID, pos) { - return `${type}.${vatID}.${pos}`; -} - -/** - * @param {SwingStoreExporter} exporter + * @param {import('./exporter').SwingStoreExporter} exporter * @param {string | null} [dirPath] - * @param {object} options - * @returns {Promise} + * @param {ImportSwingStoreOptions} options + * @returns {Promise} */ export async function importSwingStore(exporter, dirPath = null, options = {}) { - if (dirPath) { - typeof dirPath === 'string' || Fail`dirPath must be a string`; + if (dirPath && typeof dirPath !== 'string') { + Fail`dirPath must be a string`; } const { includeHistorical = false } = options; const store = makeSwingStore(dirPath, true, options); const { kernelStorage, internal } = store; - // Artifact metadata, keyed as `${type}.${vatID}.${pos}` - // - // Note that this key is almost but not quite the artifact name, since the - // names of 
transcript span artifacts also include the endPos, but the endPos - // value is in flux until the span is complete. - const artifactMetadata = new Map(); + // For every exportData entry, we add a DB record. 'kv' entries are + // the "kvStore shadow table", and are not associated with any + // artifacts. All other entries are associated with an artifact, + // however the import may or may not contain that artifact (the + // dataset can be incomplete: either the original DB was pruned at + // some point, or the exporter did not choose to include + // everything). The DB records we add are marked as incomplete (as + // if they had been pruned locally), and can be populated later when + // the artifact is retrieved. - // Each vat requires a transcript span and (usually) a snapshot. This table - // tracks which of these we've seen, keyed by vatID. - // vatID -> { snapshotKey: metadataKey, transcriptKey: metatdataKey } - const vatArtifacts = new Map(); - const bundleArtifacts = new Map(); + // While unlikely, the getExportData() protocol *is* allowed to + // deliver multiple values for the same key (last one wins), or use + // 'null' to delete a previously-defined key. So our first pass both + // installs the kvStore shadow records, and de-dups/deletes the + // metadata records into this Map. 
+ + const allMetadata = new Map(); for await (const [key, value] of exporter.getExportData()) { const [tag] = key.split('.', 1); - const subKey = key.substring(tag.length + 1); if (tag === 'kv') { // 'kv' keys contain individual kvStore entries + const subKey = key.substring(tag.length + 1); if (value == null) { // Note '==' rather than '===': any nullish value implies deletion kernelStorage.kvStore.delete(subKey); } else { kernelStorage.kvStore.set(subKey, value); } - } else if (tag === 'bundle') { - // 'bundle' keys contain bundle IDs - if (value == null) { - bundleArtifacts.delete(key); - } else { - bundleArtifacts.set(key, value); - } - } else if (tag === 'transcript' || tag === 'snapshot') { - // 'transcript' and 'snapshot' keys contain artifact description info. - assert(value); // make TypeScript shut up - const { vatID, isCurrent, pos } = parseVatArtifactExportKey(key); - if (isCurrent) { - const vatInfo = vatArtifacts.get(vatID) || {}; - if (tag === 'snapshot') { - // `export.snapshot.{vatID}.current` directly identifies the current snapshot artifact - vatInfo.snapshotKey = value; - } else if (tag === 'transcript') { - // `export.transcript.${vatID}.current` contains a metadata record for the current - // state of the current transcript span as of the time of export - const metadata = JSON.parse(value); - vatInfo.transcriptKey = artifactKey(tag, vatID, metadata.startPos); - artifactMetadata.set(vatInfo.transcriptKey, metadata); - } - vatArtifacts.set(vatID, vatInfo); - } else { - artifactMetadata.set(artifactKey(tag, vatID, pos), JSON.parse(value)); - } + } else if (value == null) { + allMetadata.delete(key); } else { - Fail`unknown artifact type tag ${q(tag)} on import`; + allMetadata.set(key, value); } } - // At this point we should have acquired the entire KV store state, plus - // sufficient metadata to identify the complete set of artifacts we'll need to - // fetch along with the information required to validate each of them after - // fetching. 
- // - // Depending on how the export was parameterized, the metadata may also include - // information about historical artifacts that we might or might not actually - // fetch depending on how this import was parameterized - - // Fetch the set of current artifacts. - - // Keep track of fetched artifacts in this set so we don't fetch them a second - // time if we are trying for historical artifacts also. - const fetchedArtifacts = new Set(); + // Now take each metadata record and install the stub/pruned entry + // into the DB. - for await (const [vatID, vatInfo] of vatArtifacts.entries()) { - // For each vat, we *must* have a transcript span. If this is not the very - // first transcript span in the history of that vat, then we also must have - // a snapshot for the state of the vat immediately prior to when the - // transcript span begins. - vatInfo.transcriptKey || - Fail`missing current transcript key for vat ${q(vatID)}`; - const transcriptInfo = artifactMetadata.get(vatInfo.transcriptKey); - transcriptInfo || Fail`missing transcript metadata for vat ${q(vatID)}`; - let snapshotInfo; - if (vatInfo.snapshotKey) { - snapshotInfo = artifactMetadata.get(vatInfo.snapshotKey); - snapshotInfo || Fail`missing snapshot metadata for vat ${q(vatID)}`; - } - if (!snapshotInfo) { - transcriptInfo.startPos === 0 || - Fail`missing current snapshot for vat ${q(vatID)}`; + for (const [key, value] of allMetadata.entries()) { + const [tag] = key.split('.', 1); + if (tag === 'bundle') { + internal.bundleStore.importBundleRecord(key, value); + } else if (tag === 'snapshot') { + internal.snapStore.importSnapshotRecord(key, value); + } else if (tag === 'transcript') { + internal.transcriptStore.importTranscriptSpanRecord(key, value); } else { - snapshotInfo.snapPos + 1 === transcriptInfo.startPos || - Fail`current transcript for vat ${q(vatID)} doesn't go with snapshot`; - fetchedArtifacts.add(vatInfo.snapshotKey); + Fail`unknown export-data type ${q(tag)} on import`; } - await 
(!snapshotInfo || - internal.snapStore.importSnapshot( - vatInfo.snapshotKey, - exporter, - snapshotInfo, - )); - - const transcriptArtifactName = `${vatInfo.transcriptKey}.${transcriptInfo.endPos}`; - await internal.transcriptStore.importSpan( - transcriptArtifactName, - exporter, - transcriptInfo, - ); - fetchedArtifacts.add(transcriptArtifactName); - } - const bundleArtifactNames = Array.from(bundleArtifacts.keys()).sort(); - for await (const bundleArtifactName of bundleArtifactNames) { - await internal.bundleStore.importBundle( - bundleArtifactName, - exporter, - bundleArtifacts.get(bundleArtifactName), - ); } - if (!includeHistorical) { - await exporter.close(); - return store; - } - - // If we're also importing historical artifacts, have the exporter enumerate - // the complete set of artifacts it has and fetch all of them except for the - // ones we've already fetched. - for await (const artifactName of exporter.getArtifactNames()) { - if (fetchedArtifacts.has(artifactName)) { - continue; - } - let fetchedP; - if (artifactName.startsWith('snapshot.')) { - fetchedP = internal.snapStore.importSnapshot( - artifactName, - exporter, - artifactMetadata.get(artifactName), - ); - } else if (artifactName.startsWith('transcript.')) { - // strip endPos off artifact name - const metadataKey = artifactName.split('.').slice(0, 3).join('.'); - fetchedP = internal.transcriptStore.importSpan( - artifactName, - exporter, - artifactMetadata.get(metadataKey), + // All the metadata is now installed, and we're prepared for + // artifacts. We walk `getArtifactNames()` and offer each one to the + // submodule, which ignores historical ones (unless + // 'includeHistorical' is true), and validates+accepts the + // rest. This is an initial import, so we don't need to check if we + // already have the data, but the submodule function is free to do + // that check if they want. 
+ + for await (const name of exporter.getArtifactNames()) { + const makeChunkIterator = () => exporter.getArtifact(name); + const dataProvider = async () => buffer(makeChunkIterator()); + const [tag] = name.split('.', 1); + // TODO: pass the same args to all artifact importers, and let + // stores register their functions by + // 'type'. https://github.com/Agoric/agoric-sdk/pull/8075#discussion_r1285265453 + if (tag === 'bundle') { + await internal.bundleStore.importBundle(name, dataProvider); + } else if (tag === 'snapshot') { + await internal.snapStore.populateSnapshot(name, makeChunkIterator, { + includeHistorical, + }); + } else if (tag === 'transcript') { + await internal.transcriptStore.populateTranscriptSpan( + name, + makeChunkIterator, + { includeHistorical }, ); - } else if (artifactName.startsWith('bundle.')) { - // already taken care of - continue; } else { - Fail`unknown artifact type: ${artifactName}`; + Fail`unknown artifact type ${q(tag)} on import`; } - await fetchedP; } + + // We've installed all the artifacts that we could, now do a + // completeness check. 
+ + assertComplete(internal, 'operational'); + await exporter.close(); return store; } diff --git a/packages/swing-store/src/snapStore.js b/packages/swing-store/src/snapStore.js index faed03ca7e0..4cff5e2a6d7 100644 --- a/packages/swing-store/src/snapStore.js +++ b/packages/swing-store/src/snapStore.js @@ -24,6 +24,11 @@ import { buffer } from './util.js'; * @property {number} compressedSize */ +/** + * @template T + * @typedef { import('./exporter').AnyIterableIterator } AnyIterableIterator + */ + /** * @typedef { import('./exporter').SwingStoreExporter } SwingStoreExporter * @@ -37,10 +42,12 @@ import { buffer } from './util.js'; * }} SnapStore * * @typedef {{ - * exportSnapshot: (name: string, includeHistorical: boolean) => AsyncIterableIterator, - * importSnapshot: (artifactName: string, exporter: SwingStoreExporter, artifactMetadata: Map) => void, + * exportSnapshot: (name: string) => AsyncIterableIterator, * getExportRecords: (includeHistorical: boolean) => IterableIterator, * getArtifactNames: (includeHistorical: boolean) => AsyncIterableIterator, + * importSnapshotRecord: (key: string, value: string) => void, + * populateSnapshot: (name: string, makeChunkIterator: () => AnyIterableIterator, options: { includeHistorical: boolean }) => Promise, + * assertComplete: (level: 'operational') => void, * }} SnapStoreInternal * * @typedef {{ @@ -81,11 +88,26 @@ export function makeSnapStore( compressedSize INTEGER, compressedSnapshot BLOB, PRIMARY KEY (vatID, snapPos), - UNIQUE (vatID, inUse), - CHECK(compressedSnapshot is not null or inUse is null) + UNIQUE (vatID, inUse) ) `); + // NOTE: there are two versions of this schema. The original, which + // we'll call "version 1A", has a: + // CHECK(compressedSnapshot is not null or inUse is null) + // in the table. Version 1B is missing that constraint. Any DB + // created by the original code will use 1A. Any DB created by the + // new version will use 1B. 
The import process needs to temporarily + // violate that check, but any DB created by `importSwingStore` is + // (by definition) new, so it will use 1B, which doesn't enforce the + // check. We expect to implement schema migration + // (https://github.com/Agoric/agoric-sdk/issues/8089) soon, which + // will upgrade both 1A and 1B to "version 2", which will omit the + // check (in addition to any other changes we need at that point) + + // pruned snapshots will have compressedSnapshot of NULL, and might + // also have NULL for uncompressedSize and compressedSize + const sqlDeleteAllUnusedSnapshots = db.prepare(` DELETE FROM snapshots WHERE inUse is null @@ -98,6 +120,12 @@ export function makeSnapStore( function deleteAllUnusedSnapshots() { ensureTxn(); sqlDeleteAllUnusedSnapshots.run(); + + // NOTE: this is more than pruning the snapshot data, it deletes + // the metadata/hash as well, making it impossible to safely + // repopulate the snapshot data from an untrusted source. We need + // to replace this with a method that merely nulls out the + // 'compressedSnapshot' field. 
} function snapshotArtifactName(rec) { @@ -255,10 +283,9 @@ export function makeSnapStore( * `snapshot.${vatID}.${startPos}` * * @param {string} name - * @param {boolean} includeHistorical * @returns {AsyncIterableIterator} */ - function exportSnapshot(name, includeHistorical) { + function exportSnapshot(name) { typeof name === 'string' || Fail`artifact name must be a string`; const parts = name.split('.'); const [type, vatID, pos] = parts; @@ -268,9 +295,8 @@ export function makeSnapStore( const snapPos = Number(pos); const snapshotInfo = sqlGetSnapshot.get(vatID, snapPos); snapshotInfo || Fail`snapshot ${q(name)} not available`; - const { inUse, compressedSnapshot } = snapshotInfo; + const { compressedSnapshot } = snapshotInfo; compressedSnapshot || Fail`artifact ${q(name)} is not available`; - inUse || includeHistorical || Fail`artifact ${q(name)} is not available`; // weird construct here is because we need to be able to throw before the generator starts async function* exporter() { const gzReader = Readable.from(compressedSnapshot); @@ -412,6 +438,13 @@ export function makeSnapStore( ORDER BY vatID, snapPos `); + const sqlGetAvailableSnapshots = db.prepare(` + SELECT vatID, snapPos, hash, uncompressedSize, compressedSize, inUse + FROM snapshots + WHERE inUse IS ? AND compressedSnapshot is not NULL + ORDER BY vatID, snapPos + `); + /** * Obtain artifact metadata records for spanshots contained in this store. 
* @@ -448,40 +481,85 @@ export function makeSnapStore( } async function* getArtifactNames(includeHistorical) { - for (const rec of sqlGetSnapshotMetadata.iterate(1)) { + for (const rec of sqlGetAvailableSnapshots.iterate(1)) { yield snapshotArtifactName(rec); } if (includeHistorical) { - for (const rec of sqlGetSnapshotMetadata.iterate(null)) { + for (const rec of sqlGetAvailableSnapshots.iterate(null)) { yield snapshotArtifactName(rec); } } } + const sqlAddSnapshotRecord = db.prepare(` + INSERT INTO snapshots (vatID, snapPos, hash, inUse) + VALUES (?, ?, ?, ?) + `); + + function importSnapshotRecord(key, value) { + ensureTxn(); + const [tag, ...pieces] = key.split('.'); + assert.equal(tag, 'snapshot'); + const [_vatID, endPos] = pieces; + if (endPos === 'current') { + // metadata['snapshot.v1.current'] = 'snapshot.v1.5' , i.e. it + // points to the name of the current artifact. We could + // conceivably remember this and compare it against the .inUse + // property of that record, but it's not worth the effort (we + // might encounter the records in either order). + return; + } + const metadata = JSON.parse(value); + const { vatID, snapPos, hash, inUse } = metadata; + vatID || Fail`snapshot metadata missing vatID: ${metadata}`; + snapPos !== undefined || + Fail`snapshot metadata missing snapPos: ${metadata}`; + hash || Fail`snapshot metadata missing hash: ${metadata}`; + inUse !== undefined || Fail`snapshot metadata missing inUse: ${metadata}`; + + sqlAddSnapshotRecord.run(vatID, snapPos, hash, inUse ? 1 : null); + } + + const sqlGetSnapshotHashFor = db.prepare(` + SELECT hash, inUse + FROM snapshots + WHERE vatID = ? AND snapPos = ? + `); + + const sqlPopulateSnapshot = db.prepare(` + UPDATE snapshots SET + uncompressedSize = ?, compressedSize = ?, compressedSnapshot = ? + WHERE vatID = ? AND snapPos = ? 
+ `); + /** * @param {string} name Artifact name of the snapshot - * @param {SwingStoreExporter} exporter Whence to get the bits - * @param {object} info Metadata describing the artifact + * @param {() => AnyIterableIterator} makeChunkIterator get an iterator of snapshot byte chunks + * @param {object} options + * @param {boolean} options.includeHistorical * @returns {Promise} */ - async function importSnapshot(name, exporter, info) { + async function populateSnapshot(name, makeChunkIterator, options) { + ensureTxn(); + const { includeHistorical } = options; const parts = name.split('.'); const [type, vatID, rawEndPos] = parts; // prettier-ignore parts.length === 3 && type === 'snapshot' || Fail`expected snapshot name of the form 'snapshot.{vatID}.{snapPos}', saw '${q(name)}'`; - // prettier-ignore - info.vatID === vatID || - Fail`snapshot name says vatID ${q(vatID)}, metadata says ${q(info.vatID)}`; const snapPos = Number(rawEndPos); - // prettier-ignore - info.snapPos === snapPos || - Fail`snapshot name says snapPos ${q(snapPos)}, metadata says ${q(info.snapPos)}`; + const metadata = + sqlGetSnapshotHashFor.get(vatID, snapPos) || + Fail`no metadata for snapshot ${name}`; - const artifactChunks = exporter.getArtifact(name); + if (!metadata.inUse && !includeHistorical) { + return; // ignore old snapshots + } + + const artifactChunks = makeChunkIterator(); const inStream = Readable.from(artifactChunks); - let size = 0; - inStream.on('data', chunk => (size += chunk.length)); + let uncompressedSize = 0; + inStream.on('data', chunk => (uncompressedSize += chunk.length)); const hashStream = createHash('sha256'); const gzip = createGzip(); inStream.pipe(hashStream); @@ -489,21 +567,37 @@ export function makeSnapStore( const compressedArtifact = await buffer(gzip); await finished(inStream); const hash = hashStream.digest('hex'); + + // validate against the previously-established metadata // prettier-ignore - info.hash === hash || - Fail`snapshot ${q(name)} hash is 
${q(hash)}, metadata says ${q(info.hash)}`; - ensureTxn(); - sqlSaveSnapshot.run( - vatID, - snapPos, - info.inUse ? 1 : null, - info.hash, - size, + metadata.hash === hash || + Fail`snapshot ${q(name)} hash is ${q(hash)}, metadata says ${q(metadata.hash)}`; + + sqlPopulateSnapshot.run( + uncompressedSize, compressedArtifact.length, compressedArtifact, + vatID, + snapPos, ); } + const sqlListPrunedCurrentSnapshots = db.prepare(` + SELECT vatID FROM snapshots + WHERE inUse = 1 AND compressedSnapshot IS NULL + ORDER BY vatID + `); + sqlListPrunedCurrentSnapshots.pluck(); + + function assertComplete(level) { + assert.equal(level, 'operational'); // for now + // every 'inUse' snapshot must be populated + const vatIDs = sqlListPrunedCurrentSnapshots.all(); + if (vatIDs.length) { + throw Fail`current snapshots are pruned for vats ${vatIDs.join(',')}`; + } + } + const sqlListAllSnapshots = db.prepare(` SELECT vatID, snapPos, inUse, hash, uncompressedSize, compressedSize FROM snapshots @@ -563,10 +657,14 @@ export function makeSnapStore( deleteVatSnapshots, stopUsingLastSnapshot, getSnapshotInfo, + getExportRecords, getArtifactNames, exportSnapshot, - importSnapshot, + + importSnapshotRecord, + populateSnapshot, + assertComplete, hasHash, listAllSnapshots, diff --git a/packages/swing-store/src/transcriptStore.js b/packages/swing-store/src/transcriptStore.js index 3d300bec2d7..b0ffcb605cd 100644 --- a/packages/swing-store/src/transcriptStore.js +++ b/packages/swing-store/src/transcriptStore.js @@ -6,8 +6,11 @@ import BufferLineTransform from '@agoric/internal/src/node/buffer-line-transform import { createSHA256 } from './hasher.js'; /** - * @typedef { import('./exporter').SwingStoreExporter } SwingStoreExporter - * + * @template T + * @typedef { IterableIterator | AsyncIterableIterator } AnyIterableIterator + */ + +/** * @typedef {{ * initTranscript: (vatID: string) => void, * rolloverSpan: (vatID: string) => number, @@ -19,10 +22,12 @@ import { createSHA256 } from 
'./hasher.js'; * }} TranscriptStore * * @typedef {{ - * exportSpan: (name: string, includeHistorical: boolean) => AsyncIterableIterator - * importSpan: (artifactName: string, exporter: SwingStoreExporter, artifactMetadata: Map) => Promise, + * exportSpan: (name: string) => AsyncIterableIterator * getExportRecords: (includeHistorical: boolean) => IterableIterator, * getArtifactNames: (includeHistorical: boolean) => AsyncIterableIterator, + * importTranscriptSpanRecord: (key: string, value: string) => void, + * populateTranscriptSpan: (name: string, makeChunkIterator: () => AnyIterableIterator, options: { includeHistorical: boolean }) => Promise, + * assertComplete: (level: 'operational') => void, * readFullVatTranscript: (vatID: string) => Iterable<{position: number, item: string}> * }} TranscriptStoreInternal * @@ -82,7 +87,7 @@ export function makeTranscriptStore( // // The transcriptItems associated with historical spans may or may not exist, // depending on pruning. However, the items associated with the current span - // must always be present + // must always be present. db.exec(` CREATE TABLE IF NOT EXISTS transcriptSpans ( @@ -377,6 +382,17 @@ export function makeTranscriptStore( } } + // 'position' is not recycled across incarnations, so strictly + // speaking this query doesn't need to filter on 'incarnation = ?', + // but this will catch problems like items with incorrect or missing + // incarnation values + + const sqlCountPopulatedSpanItems = db.prepare(` + SELECT COUNT(*) FROM transcriptItems + WHERE vatID = ? AND incarnation = ? AND position >= ? AND position < ? + `); + sqlCountPopulatedSpanItems.pluck(); + /** * Obtain artifact names for spans contained in this store. * @@ -392,6 +408,13 @@ export function makeTranscriptStore( ? 
sqlGetAllSpanMetadata : sqlGetCurrentSpanMetadata; for (const rec of sql.iterate()) { + const { vatID, incarnation, startPos, endPos } = rec; + if ( + !sqlCountPopulatedSpanItems.get(vatID, incarnation, startPos, endPos) + ) { + continue; + } + yield spanArtifactName(rec); } } @@ -432,15 +455,22 @@ export function makeTranscriptStore( } } startPos <= endPos || Fail`${q(startPos)} <= ${q(endPos)}}`; + const expectedCount = endPos - startPos; function* reader() { + let count = 0; for (const { item } of sqlReadSpanItems.iterate( vatID, startPos, endPos, )) { yield item; + count += 1; } + count === expectedCount || + Fail`read ${q(count)} transcript entries (expected ${q( + expectedCount, + )})`; } if (startPos === endPos) { @@ -465,12 +495,10 @@ export function makeTranscriptStore( * `transcript.${vatID}.${startPos}.${endPos}` * * @param {string} name The name of the transcript artifact to be read - * @param {boolean} includeHistorical If true, allow non-current spans to be fetched - * * @returns {AsyncIterableIterator} * @yields {Uint8Array} */ - async function* exportSpan(name, includeHistorical) { + async function* exportSpan(name) { typeof name === 'string' || Fail`artifact name must be a string`; const parts = name.split('.'); const [type, vatID, pos] = parts; @@ -479,9 +507,6 @@ export function makeTranscriptStore( Fail`expected artifact name of the form 'transcript.{vatID}.{startPos}.{endPos}', saw ${q(name)}`; const isCurrent = sqlGetSpanIsCurrent.get(vatID, pos); isCurrent !== undefined || Fail`transcript span ${q(name)} not available`; - isCurrent || - includeHistorical || - Fail`transcript span ${q(name)} not available`; const startPos = Number(pos); for (const entry of readSpan(vatID, startPos)) { yield Buffer.from(`${entry}\n`); @@ -516,33 +541,84 @@ export function makeTranscriptStore( noteExport(spanMetadataKey(rec), JSON.stringify(rec)); }; + function importTranscriptSpanRecord(key, value) { + ensureTxn(); + const [tag, keyVatID, keyStartPos] = 
key.split('.'); + assert.equal(tag, 'transcript'); + const metadata = JSON.parse(value); + if (key.endsWith('.current') !== Boolean(metadata.isCurrent)) { + throw Fail`transcript key ${key} mismatches metadata ${metadata}`; + } + const { vatID, startPos, endPos, hash, isCurrent, incarnation } = metadata; + vatID || Fail`transcript metadata missing vatID: ${metadata}`; + startPos !== undefined || + Fail`transcript metadata missing startPos: ${metadata}`; + endPos !== undefined || + Fail`transcript metadata missing endPos: ${metadata}`; + hash || Fail`transcript metadata missing hash: ${metadata}`; + isCurrent !== undefined || + Fail`transcript metadata missing isCurrent: ${metadata}`; + incarnation !== undefined || + Fail`transcript metadata missing incarnation: ${metadata}`; + if (keyStartPos !== 'current') { + if (Number(keyStartPos) !== startPos) { + Fail`transcript key ${key} mismatches metadata ${metadata}`; + } + } + keyVatID === vatID || + Fail`transcript key ${key} mismatches metadata ${metadata}`; + + // sqlWriteSpan is an INSERT, so the PRIMARY KEY (vatID, position) + // constraint will catch broken export-data errors like trying to + // add two different versions of the same span (e.g. one holding + // items 4..8, a second holding 4..9) + + sqlWriteSpan.run( + vatID, + startPos, + endPos, + hash, + isCurrent ? 1 : null, + incarnation, + ); + } + + const sqlGetSpanMetadataFor = db.prepare(` + SELECT hash, isCurrent, incarnation, endPos + FROM transcriptSpans + WHERE vatID = ? AND startPos = ? + `); + /** * Import a transcript span from another store. 
* * @param {string} name Artifact Name of the transcript span - * @param {SwingStoreExporter} exporter Exporter from which to get the span data - * @param {object} info Metadata describing the span + * @param {() => AnyIterableIterator} makeChunkIterator get an iterator of transcript byte chunks + * @param {object} options + * @param {boolean} options.includeHistorical * * @returns {Promise} */ - async function importSpan(name, exporter, info) { + async function populateTranscriptSpan(name, makeChunkIterator, options) { + ensureTxn(); + const { includeHistorical } = options; const parts = name.split('.'); const [type, vatID, rawStartPos, rawEndPos] = parts; // prettier-ignore parts.length === 4 && type === 'transcript' || Fail`expected artifact name of the form 'transcript.{vatID}.{startPos}.{endPos}', saw '${q(name)}'`; - // prettier-ignore - info.vatID === vatID || - Fail`artifact name says vatID ${q(vatID)}, metadata says ${q(info.vatID)}`; const startPos = Number(rawStartPos); - // prettier-ignore - info.startPos === startPos || - Fail`artifact name says startPos ${q(startPos)}, metadata says ${q(info.startPos)}`; const endPos = Number(rawEndPos); - // prettier-ignore - info.endPos === endPos || - Fail`artifact name says endPos ${q(endPos)}, metadata says ${q(info.endPos)}`; - const artifactChunks = exporter.getArtifact(name); + + const metadata = + sqlGetSpanMetadataFor.get(vatID, startPos) || + Fail`no metadata for transcript span ${name}`; + if (!metadata.isCurrent && !includeHistorical) { + return; // ignore old spans + } + assert.equal(metadata.endPos, endPos); + + const artifactChunks = await makeChunkIterator(); const inStream = Readable.from(artifactChunks); const lineTransform = new BufferLineTransform(); const lineStream = inStream.pipe(lineTransform).setEncoding('utf8'); @@ -550,21 +626,38 @@ export function makeTranscriptStore( let pos = startPos; for await (const line of lineStream) { const item = line.trimEnd(); - sqlAddItem.run(vatID, item, pos, 
info.incarnation); + sqlAddItem.run(vatID, item, pos, metadata.incarnation); hash = updateSpanHash(hash, item); pos += 1; } - pos === endPos || Fail`artifact ${name} is not available`; - info.hash === hash || - Fail`artifact ${name} hash is ${q(hash)}, metadata says ${q(info.hash)}`; - sqlWriteSpan.run( - info.vatID, - info.startPos, - info.endPos, - info.hash, - info.isCurrent ? 1 : null, - info.incarnation, - ); + pos === endPos || Fail`artifact ${name} is not complete`; + + // validate against the previously-established metadata + + // prettier-ignore + metadata.hash === hash || + Fail`artifact ${name} hash is ${q(hash)}, metadata says ${q(metadata.hash)}`; + + // If that passes, the not-yet-committed data is good. If it + // fails, the thrown error will flunk the import and inhibit a + // commit. So we're done. + } + + function assertComplete(level) { + assert.equal(level, 'operational'); // for now + // every 'isCurrent' transcript span must have all items + for (const rec of sqlGetCurrentSpanMetadata.iterate()) { + const { vatID, startPos, endPos, incarnation } = rec; + const count = sqlCountPopulatedSpanItems.get( + vatID, + incarnation, + startPos, + endPos, + ); + if (count !== endPos - startPos) { + throw Fail`incomplete current transcript span: ${count} items, ${rec}`; + } + } } return harden({ @@ -577,10 +670,13 @@ export function makeTranscriptStore( deleteVatTranscripts, exportSpan, - importSpan, getExportRecords, getArtifactNames, + importTranscriptSpanRecord, + populateTranscriptSpan, + assertComplete, + dumpTranscripts, readFullVatTranscript, }); diff --git a/packages/swing-store/test/test-bundles.js b/packages/swing-store/test/test-bundles.js index c62d3f36feb..b181611a129 100644 --- a/packages/swing-store/test/test-bundles.js +++ b/packages/swing-store/test/test-bundles.js @@ -4,11 +4,9 @@ import test from 'ava'; import tmp from 'tmp'; import { Buffer } from 'buffer'; import { createSHA256 } from '../src/hasher.js'; -import { - importSwingStore, 
- initSwingStore, - makeSwingStoreExporter, -} from '../src/index.js'; +import { initSwingStore } from '../src/swingStore.js'; +import { makeSwingStoreExporter } from '../src/exporter.js'; +import { importSwingStore } from '../src/importer.js'; import { buffer } from '../src/util.js'; function makeB0ID(bundle) { @@ -116,7 +114,9 @@ test('b0 import', async t => { t.is(name, nameA); yield Buffer.from(JSON.stringify(b0A)); }, - getArtifactNames: () => assert.fail('import should not query all names'), + async *getArtifactNames() { + yield* [nameA]; + }, close: async () => undefined, }; const { kernelStorage } = await importSwingStore(exporter); @@ -138,7 +138,9 @@ test('b0 bad import', async t => { t.is(name, nameA); yield Buffer.from(JSON.stringify(b0Abogus)); }, - getArtifactNames: () => assert.fail('import should not query all names'), + async *getArtifactNames() { + yield* [nameA]; + }, close: async () => undefined, }; await t.throwsAsync(async () => importSwingStore(exporter), { diff --git a/packages/swing-store/test/test-export.js b/packages/swing-store/test/test-export.js new file mode 100644 index 00000000000..611a0c0d497 --- /dev/null +++ b/packages/swing-store/test/test-export.js @@ -0,0 +1,308 @@ +import '@endo/init/debug.js'; + +import test from 'ava'; + +import { buffer } from '../src/util.js'; +import { initSwingStore, makeSwingStoreExporter } from '../src/index.js'; + +import { tmpDir, getSnapshotStream, makeB0ID } from './util.js'; + +const snapshotData = 'snapshot data'; +// this snapHash was computed manually +const snapHash = + 'e7dee7266896538616b630a5da40a90e007726a383e005a9c9c5dd0c2daf9329'; + +/** @type {import('../src/bundleStore.js').Bundle} */ +const bundle0 = { moduleFormat: 'nestedEvaluate', source: '1+1' }; +const bundle0ID = makeB0ID(bundle0); + +const exportTest = test.macro(async (t, mode) => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + // const dbDir = 't-db'; + + const options = {}; + if (mode === 
'debug') { + options.keepSnapshots = true; // else old snapshots are deleted + } + const ss1 = initSwingStore(dbDir, options); + const ks = ss1.kernelStorage; + + // build a DB with three spans (only one inUse) and two snapshots + // (same) + + ks.kvStore.set('key1', 'value1'); + ks.bundleStore.addBundle(bundle0ID, bundle0); + ks.transcriptStore.initTranscript('v1'); + + ks.transcriptStore.addItem('v1', 'start-worker'); // 0 + ks.transcriptStore.addItem('v1', 'delivery1'); // 1 + await ks.snapStore.saveSnapshot('v1', 2, getSnapshotStream(snapshotData)); + ks.transcriptStore.addItem('v1', 'save-snapshot'); // 2 + ks.transcriptStore.rolloverSpan('v1'); // range= 0..3 + const spanHash1 = + '57152efdd7fdf75c03371d2b4f1088d5bf3eae7fe643babce527ff81df38998c'; + + ks.transcriptStore.addItem('v1', 'load-snapshot'); // 3 + ks.transcriptStore.addItem('v1', 'delivery2'); // 4 + await ks.snapStore.saveSnapshot('v1', 5, getSnapshotStream(snapshotData)); + ks.transcriptStore.addItem('v1', 'save-snapshot'); // 5 + ks.transcriptStore.rolloverSpan('v1'); // range= 3..6 + const spanHash2 = + '1947001e78e01bd1e773feb22b4ffc530447373b9de9274d5d5fbda3f23dbf2b'; + + ks.transcriptStore.addItem('v1', 'load-snapshot'); // 6 + ks.transcriptStore.addItem('v1', 'delivery3'); // 7 + const spanHash3 = + 'e6b42c6a3fb94285a93162f25a9fc0145fd4c5bb144917dc572c50ae2d02ee69'; + // current range= 6..8 + + ss1.hostStorage.commit(); + + // create an export, and assert that the pieces match what we + // expect. 
exportMode='current' means we get all metadata, no + // historical transcript spans, and no historical snapshots + + assert.typeof(mode, 'string'); + /** @typedef {import('../src/exporter.js').ExportMode} ExportMode */ + let exportMode = /** @type {ExportMode} */ (mode); + if (mode === 'debug-on-pruned') { + exportMode = 'debug'; + } + const exporter = makeSwingStoreExporter(dbDir, exportMode); + + // exportData + { + const exportData = new Map(); + for await (const [key, value] of exporter.getExportData()) { + exportData.set(key, value); + } + // console.log('exportData:', exportData); + + const check = (key, expected) => { + t.true(exportData.has(key)); + let value = exportData.get(key); + exportData.delete(key); + if (typeof expected === 'object') { + value = JSON.parse(value); + } + t.deepEqual(value, expected); + }; + + check('kv.key1', 'value1'); + check('snapshot.v1.2', { + vatID: 'v1', + snapPos: 2, + inUse: 0, + hash: snapHash, + }); + check('snapshot.v1.5', { + vatID: 'v1', + snapPos: 5, + inUse: 1, + hash: snapHash, + }); + check('snapshot.v1.current', 'snapshot.v1.5'); + const base = { vatID: 'v1', incarnation: 0, isCurrent: 0 }; + check('transcript.v1.0', { + ...base, + startPos: 0, + endPos: 3, + hash: spanHash1, + }); + check('transcript.v1.3', { + ...base, + startPos: 3, + endPos: 6, + hash: spanHash2, + }); + check('transcript.v1.current', { + ...base, + startPos: 6, + endPos: 8, + isCurrent: 1, + hash: spanHash3, + }); + check(`bundle.${bundle0ID}`, bundle0ID); + + // the above list is supposed to be exhaustive + if (exportData.size) { + console.log('unexpected exportData keys'); + console.log(exportData); + t.fail('unexpected exportData keys'); + } + } + + // artifacts + { + const names = new Set(); + const contents = new Map(); + for await (const name of exporter.getArtifactNames()) { + names.add(name); + contents.set(name, (await buffer(exporter.getArtifact(name))).toString()); + } + // console.log('artifacts:', contents); + + const check = 
async (name, expected) => { + t.true(names.has(name)); + names.delete(name); + let data = contents.get(name); + if (typeof expected === 'object') { + data = JSON.parse(data); + } + t.deepEqual(data, expected); + }; + + // export mode 'current' means we omit historical snapshots and + // transcript spans + + await check('snapshot.v1.5', 'snapshot data'); + await check('transcript.v1.6.8', 'load-snapshot\ndelivery3\n'); + await check(`bundle.${bundle0ID}`, bundle0); + + if (mode === 'archival' || mode === 'debug' || mode === 'debug-on-pruned') { + // adds the old transcript spans + await check( + 'transcript.v1.0.3', + 'start-worker\ndelivery1\nsave-snapshot\n', + ); + await check( + 'transcript.v1.3.6', + 'load-snapshot\ndelivery2\nsave-snapshot\n', + ); + } + + if (mode === 'debug') { + // adds the old snapshots, which are only present if + // initSwingStore() was given {keepSnapshots: true} + await check('snapshot.v1.2', 'snapshot data'); + // mode='debug-on-pruned' exercises the keepSnapshots:false case + } + + if (names.size) { + console.log(`unexpected artifacts:`); + console.log(names); + t.fail('unexpected artifacts'); + } + } +}); + +test('export current', exportTest, 'current'); +test('export archival', exportTest, 'archival'); +test('export debug', exportTest, 'debug'); +test('export debug-on-pruned', exportTest, 'debug-on-pruned'); + +test('export omits pruned span artifacts', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + // const dbDir = 't-db'; + + // use keepTranscripts=false to simulate an explicit prune of the + // old span + const options = { keepTranscripts: false }; + const ss1 = initSwingStore(dbDir, options); + const ks = ss1.kernelStorage; + + // build a DB with two spans, one is inUse, other is pruned + + ks.transcriptStore.initTranscript('v1'); + ks.transcriptStore.addItem('v1', 'start-worker'); // 0 + ks.transcriptStore.addItem('v1', 'delivery1'); // 1 + await ks.snapStore.saveSnapshot('v1', 2, 
getSnapshotStream(snapshotData)); + ks.transcriptStore.addItem('v1', 'save-snapshot'); // 2 + ks.transcriptStore.rolloverSpan('v1'); // range= 0..3 + const spanHash1 = + '57152efdd7fdf75c03371d2b4f1088d5bf3eae7fe643babce527ff81df38998c'; + // rolloverSpan prunes the contents of the old span + + ks.transcriptStore.addItem('v1', 'load-snapshot'); // 3 + ks.transcriptStore.addItem('v1', 'delivery2'); // 4 + const spanHash2 = + 'b26c8faf425c3c2738e0c5a5e9a7cd71075c68f0c9f2d6cdfd83c68204801dbb'; + + ss1.hostStorage.commit(); + + const exporter = makeSwingStoreExporter(dbDir, 'archival'); + + // exportData + { + const exportData = new Map(); + for await (const [key, value] of exporter.getExportData()) { + exportData.set(key, value); + } + // console.log('exportData:', exportData); + + const check = (key, expected) => { + t.true(exportData.has(key)); + let value = exportData.get(key); + exportData.delete(key); + if (typeof expected === 'object') { + value = JSON.parse(value); + } + t.deepEqual(value, expected); + }; + + check('snapshot.v1.2', { + vatID: 'v1', + snapPos: 2, + inUse: 1, + hash: snapHash, + }); + check('snapshot.v1.current', 'snapshot.v1.2'); + const base = { vatID: 'v1', incarnation: 0, isCurrent: 0 }; + check('transcript.v1.0', { + ...base, + startPos: 0, + endPos: 3, + hash: spanHash1, + }); + check('transcript.v1.current', { + ...base, + startPos: 3, + endPos: 5, + isCurrent: 1, + hash: spanHash2, + }); + + // the above list is supposed to be exhaustive + if (exportData.size) { + console.log('unexpected exportData keys'); + console.log(exportData); + t.fail('unexpected exportData keys'); + } + } + + // artifacts + { + const names = new Set(); + const contents = new Map(); + for await (const name of exporter.getArtifactNames()) { + names.add(name); + contents.set(name, (await buffer(exporter.getArtifact(name))).toString()); + } + // console.log('artifacts:', contents); + + const check = async (name, expected) => { + t.true(names.has(name)); + 
names.delete(name); + let data = contents.get(name); + if (typeof expected === 'object') { + data = JSON.parse(data); + } + t.deepEqual(data, expected); + }; + + // export mode 'archival' means we include all available + // historical snapshots and transcript spans + + await check('snapshot.v1.2', 'snapshot data'); + // no transcript.v1.0.3 because the contents were pruned + await check('transcript.v1.3.5', 'load-snapshot\ndelivery2\n'); + + if (names.size) { + console.log(`unexpected artifacts:`); + console.log(names); + t.fail('unexpected artifacts'); + } + } +}); diff --git a/packages/swing-store/test/test-exportImport.js b/packages/swing-store/test/test-exportImport.js index 8c30bfe19dc..ca4ffb71e2c 100644 --- a/packages/swing-store/test/test-exportImport.js +++ b/packages/swing-store/test/test-exportImport.js @@ -174,7 +174,6 @@ async function testExportImport( runMode, exportMode, importMode, - failureMode, expectedArtifactNames, ) { const exportLog = makeExportLog(); @@ -298,35 +297,20 @@ async function testExportImport( ], ]); - expectedArtifactNames = Array.from(expectedArtifactNames); - expectedArtifactNames.push(`bundle.${bundleIDA}`); - expectedArtifactNames.push(`bundle.${bundleIDB}`); + expectedArtifactNames = new Set(expectedArtifactNames); + expectedArtifactNames.add(`bundle.${bundleIDA}`); + expectedArtifactNames.add(`bundle.${bundleIDB}`); - const artifactNames = []; + const artifactNames = new Set(); for await (const name of exporter.getArtifactNames()) { - artifactNames.push(name); + artifactNames.add(name); } t.deepEqual(artifactNames, expectedArtifactNames); const includeHistorical = importMode !== 'current'; const beforeDump = debug.dump(keepSnapshots); - let ssIn; - try { - ssIn = await importSwingStore(exporter, null, { - includeHistorical, - }); - } catch (e) { - if (failureMode === 'transcript') { - t.is(e.message, 'artifact "transcript.vatA.0.3" is not available'); - return; - } else if (failureMode === 'snapshot') { - t.is(e.message, 
'artifact "snapshot.vatA.2" is not available'); - return; - } - throw e; - } - t.is(failureMode, 'none'); + const ssIn = await importSwingStore(exporter, null, { includeHistorical }); await ssIn.hostStorage.commit(); const dumpsShouldMatch = runMode !== 'debug' || (exportMode === 'debug' && importMode !== 'current'); @@ -369,95 +353,95 @@ const A = 'archival'; const D = 'debug'; test('export and import data for state sync - current->current->current', async t => { - await testExportImport(t, C, C, C, 'none', expectedCurrentArtifacts); + await testExportImport(t, C, C, C, expectedCurrentArtifacts); }); test('export and import data for state sync - current->current->archival', async t => { - await testExportImport(t, C, C, A, 'none', expectedCurrentArtifacts); + await testExportImport(t, C, C, A, expectedCurrentArtifacts); }); test('export and import data for state sync - current->current->debug', async t => { - await testExportImport(t, C, C, D, 'none', expectedCurrentArtifacts); + await testExportImport(t, C, C, D, expectedCurrentArtifacts); }); test('export and import data for state sync - current->archival->current', async t => { - await testExportImport(t, C, A, C, 'none', expectedArchivalArtifacts); + await testExportImport(t, C, A, C, expectedCurrentArtifacts); }); test('export and import data for state sync - current->archival->archival', async t => { - await testExportImport(t, C, A, A, 'transcript', expectedArchivalArtifacts); + await testExportImport(t, C, A, A, expectedCurrentArtifacts); }); test('export and import data for state sync - current->archival->debug', async t => { - await testExportImport(t, C, A, D, 'transcript', expectedArchivalArtifacts); + await testExportImport(t, C, A, D, expectedCurrentArtifacts); }); test('export and import data for state sync - current->debug->current', async t => { - await testExportImport(t, C, D, C, 'none', expectedDebugArtifacts); + await testExportImport(t, C, D, C, expectedCurrentArtifacts); }); test('export and 
import data for state sync - current->debug->archival', async t => { - await testExportImport(t, C, D, A, 'snapshot', expectedDebugArtifacts); + await testExportImport(t, C, D, A, expectedCurrentArtifacts); }); test('export and import data for state sync - current->debug->debug', async t => { - await testExportImport(t, C, D, D, 'snapshot', expectedDebugArtifacts); + await testExportImport(t, C, D, D, expectedCurrentArtifacts); }); // ------------------------------------------------------------ test('export and import data for state sync - archival->current->current', async t => { - await testExportImport(t, A, C, C, 'none', expectedCurrentArtifacts); + await testExportImport(t, A, C, C, expectedCurrentArtifacts); }); test('export and import data for state sync - archival->current->archival', async t => { - await testExportImport(t, A, C, A, 'none', expectedCurrentArtifacts); + await testExportImport(t, A, C, A, expectedCurrentArtifacts); }); test('export and import data for state sync - archival->current->debug', async t => { - await testExportImport(t, A, C, D, 'none', expectedCurrentArtifacts); + await testExportImport(t, A, C, D, expectedCurrentArtifacts); }); test('export and import data for state sync - archival->archival->current', async t => { - await testExportImport(t, A, A, C, 'none', expectedArchivalArtifacts); + await testExportImport(t, A, A, C, expectedArchivalArtifacts); }); test('export and import data for state sync - archival->archival->archival', async t => { - await testExportImport(t, A, A, A, 'none', expectedArchivalArtifacts); + await testExportImport(t, A, A, A, expectedArchivalArtifacts); }); test('export and import data for state sync - archival->archival->debug', async t => { - await testExportImport(t, A, A, D, 'none', expectedArchivalArtifacts); + await testExportImport(t, A, A, D, expectedArchivalArtifacts); }); test('export and import data for state sync - archival->debug->current', async t => { - await testExportImport(t, A, D, C, 
'none', expectedDebugArtifacts); + await testExportImport(t, A, D, C, expectedArchivalArtifacts); }); test('export and import data for state sync - archival->debug->archival', async t => { - await testExportImport(t, A, D, A, 'snapshot', expectedDebugArtifacts); + await testExportImport(t, A, D, A, expectedArchivalArtifacts); }); test('export and import data for state sync - archival->debug->debug', async t => { - await testExportImport(t, A, D, D, 'snapshot', expectedDebugArtifacts); + await testExportImport(t, A, D, D, expectedArchivalArtifacts); }); // ------------------------------------------------------------ test('export and import data for state sync - debug->current->current', async t => { - await testExportImport(t, D, C, C, 'none', expectedCurrentArtifacts); + await testExportImport(t, D, C, C, expectedCurrentArtifacts); }); test('export and import data for state sync - debug->current->archival', async t => { - await testExportImport(t, D, C, A, 'none', expectedCurrentArtifacts); + await testExportImport(t, D, C, A, expectedCurrentArtifacts); }); test('export and import data for state sync - debug->current->debug', async t => { - await testExportImport(t, D, C, D, 'none', expectedCurrentArtifacts); + await testExportImport(t, D, C, D, expectedCurrentArtifacts); }); test('export and import data for state sync - debug->archival->current', async t => { - await testExportImport(t, D, A, C, 'none', expectedArchivalArtifacts); + await testExportImport(t, D, A, C, expectedArchivalArtifacts); }); test('export and import data for state sync - debug->archival->archival', async t => { - await testExportImport(t, D, A, A, 'none', expectedArchivalArtifacts); + await testExportImport(t, D, A, A, expectedArchivalArtifacts); }); test('export and import data for state sync - debug->archival->debug', async t => { - await testExportImport(t, D, A, D, 'none', expectedArchivalArtifacts); + await testExportImport(t, D, A, D, expectedArchivalArtifacts); }); test('export and 
import data for state sync - debug->debug->current', async t => { - await testExportImport(t, D, D, C, 'none', expectedDebugArtifacts); + await testExportImport(t, D, D, C, expectedDebugArtifacts); }); test('export and import data for state sync - debug->debug->archival', async t => { - await testExportImport(t, D, D, A, 'none', expectedDebugArtifacts); + await testExportImport(t, D, D, A, expectedDebugArtifacts); }); test('export and import data for state sync - debug->debug->debug', async t => { - await testExportImport(t, D, D, D, 'none', expectedDebugArtifacts); + await testExportImport(t, D, D, D, expectedDebugArtifacts); }); diff --git a/packages/swing-store/test/test-import.js b/packages/swing-store/test/test-import.js new file mode 100644 index 00000000000..040ea476695 --- /dev/null +++ b/packages/swing-store/test/test-import.js @@ -0,0 +1,476 @@ +// @ts-check + +import '@endo/init/debug.js'; + +import path from 'path'; +import { createGunzip } from 'zlib'; +import { Readable } from 'stream'; +import { Buffer } from 'buffer'; + +import sqlite3 from 'better-sqlite3'; +import test from 'ava'; +import { decodeBase64 } from '@endo/base64'; + +import { buffer } from '../src/util.js'; +import { importSwingStore, makeSwingStoreExporter } from '../src/index.js'; + +import { tmpDir, makeB0ID } from './util.js'; + +const snapshotData = 'snapshot data'; +// this snapHash was computed manually +const snapHash = + 'e7dee7266896538616b630a5da40a90e007726a383e005a9c9c5dd0c2daf9329'; + +/** @type {import('../src/bundleStore.js').Bundle} */ +const bundle0 = { moduleFormat: 'nestedEvaluate', source: '1+1' }; +const bundle0ID = makeB0ID(bundle0); + +function convert(orig) { + const bundles = Object.fromEntries( + Object.entries(orig.bundles).map(([bundleID, encBundle]) => { + const s = new TextDecoder().decode(decodeBase64(encBundle)); + assert(bundleID.startsWith('b0-'), bundleID); + const bundle = JSON.parse(s); + return [bundleID, bundle]; + }), + ); + return { ...orig, 
bundles }; +} + +/** + * @typedef { import('../src/exporter').KVPair } KVPair + */ + +/** + * @param { Map } exportData + * @param { Map } artifacts + */ +function makeExporter(exportData, artifacts) { + return { + async *getExportData() { + for (const [key, value] of exportData.entries()) { + /** @type { KVPair } */ + const pair = [key, value]; + yield pair; + } + }, + async *getArtifactNames() { + for (const name of artifacts.keys()) { + yield name; + } + }, + async *getArtifact(name) { + const data = artifacts.get(name); + assert(data, `missing artifact ${name}`); + yield Buffer.from(data); + }, + // eslint-disable-next-line no-empty-function + async close() {}, + }; +} + +test('import empty', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + const exporter = makeExporter(new Map(), new Map()); + const ss = await importSwingStore(exporter, dbDir); + ss.hostStorage.commit(); + const data = convert(ss.debug.dump()); + t.deepEqual(data, { + kvEntries: {}, + transcripts: {}, + snapshots: {}, + bundles: {}, + }); +}); + +function buildData() { + // build an export manually + const exportData = new Map(); + const artifacts = new Map(); + + // shadow kvStore + exportData.set('kv.key1', 'value1'); + + // now add artifacts and metadata in pairs + + artifacts.set(`bundle.${bundle0ID}`, JSON.stringify(bundle0)); + exportData.set(`bundle.${bundle0ID}`, bundle0ID); + + const sbase = { vatID: 'v1', hash: snapHash, inUse: 0 }; + const tbase = { vatID: 'v1', startPos: 0, isCurrent: 0, incarnation: 0 }; + const addTS = (key, obj) => + exportData.set(key, JSON.stringify({ ...tbase, ...obj })); + const t0hash = + '57152efdd7fdf75c03371d2b4f1088d5bf3eae7fe643babce527ff81df38998c'; + const t3hash = + '1947001e78e01bd1e773feb22b4ffc530447373b9de9274d5d5fbda3f23dbf2b'; + const t6hash = + 'e6b42c6a3fb94285a93162f25a9fc0145fd4c5bb144917dc572c50ae2d02ee69'; + + addTS(`transcript.v1.0`, { endPos: 3, hash: t0hash }); + artifacts.set( + 
`transcript.v1.0.3`, + 'start-worker\ndelivery1\nsave-snapshot\n', + ); + exportData.set(`snapshot.v1.2`, JSON.stringify({ ...sbase, snapPos: 2 })); + artifacts.set(`snapshot.v1.2`, snapshotData); + + addTS(`transcript.v1.3`, { startPos: 3, endPos: 6, hash: t3hash }); + artifacts.set( + 'transcript.v1.3.6', + 'load-snapshot\ndelivery2\nsave-snapshot\n', + ); + exportData.set( + `snapshot.v1.5`, + JSON.stringify({ ...sbase, snapPos: 5, inUse: 1 }), + ); + artifacts.set(`snapshot.v1.5`, snapshotData); + + artifacts.set('transcript.v1.6.8', 'load-snapshot\ndelivery3\n'); + exportData.set(`snapshot.v1.current`, 'snapshot.v1.5'); + addTS(`transcript.v1.current`, { + startPos: 6, + endPos: 8, + isCurrent: 1, + hash: t6hash, + }); + + return { exportData, artifacts, t0hash, t3hash, t6hash }; +} + +const importTest = test.macro(async (t, mode) => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + + const { exportData, artifacts, t0hash, t3hash, t6hash } = buildData(); + + const exporter = makeExporter(exportData, artifacts); + + // now import + const includeHistorical = mode === 'historical'; + const options = { includeHistorical }; + const ss = await importSwingStore(exporter, dbDir, options); + ss.hostStorage.commit(); + const data = convert(ss.debug.dump()); + + const convertTranscript = (items, startPos = 0) => { + const out = {}; + let pos = startPos; + for (const item of items) { + out[pos] = item; + pos += 1; + } + return out; + }; + + const convertSnapshots = async allVatSnapshots => { + const out = {}; + for await (const [vatID, snapshots] of Object.entries(allVatSnapshots)) { + const convertedSnapshots = []; + for await (const snapshot of snapshots) { + if (!snapshot.compressedSnapshot) { + continue; + } + const gzReader = Readable.from(snapshot.compressedSnapshot); + const unzipper = createGunzip(); + const snapshotReader = gzReader.pipe(unzipper); + const uncompressedSnapshot = await buffer(snapshotReader); + const converted = { 
...snapshot, uncompressedSnapshot }; + delete converted.compressedSnapshot; + convertedSnapshots.push(converted); + } + out[vatID] = convertedSnapshots; + } + return out; + }; + + t.deepEqual(data.kvEntries, { key1: 'value1' }); + let ts = []; + let tsStart = 6; // start of current span + if (mode === 'historical') { + tsStart = 0; // historical means we get all spans + ts = ts.concat(['start-worker', 'delivery1', 'save-snapshot']); // 0,1,2 + ts = ts.concat(['load-snapshot', 'delivery2', 'save-snapshot']); // 3,4,5 + } + ts = ts.concat(['load-snapshot', 'delivery3']); // 6,7 + + const expectedTranscript = convertTranscript(ts, tsStart); + t.deepEqual(data.transcripts, { v1: expectedTranscript }); + const uncompressedSnapshot = Buffer.from(snapshotData); + const expectedSnapshots = []; + if (mode === 'historical') { + expectedSnapshots.push({ + uncompressedSnapshot, + hash: snapHash, + inUse: 0, + snapPos: 2, + }); + } + expectedSnapshots.push({ + uncompressedSnapshot, + hash: snapHash, + inUse: 1, + snapPos: 5, + }); + t.deepEqual(await convertSnapshots(data.snapshots), { + v1: expectedSnapshots, + }); + t.deepEqual(data.bundles, { [bundle0ID]: bundle0 }); + + // look directly at the DB to confirm presence of metadata rows + const db = sqlite3(path.join(dbDir, 'swingstore.sqlite')); + const spanRows = [ + ...db.prepare('SELECT * FROM transcriptSpans ORDER BY startPos').iterate(), + ]; + t.deepEqual( + spanRows.map(sr => sr.startPos), + [0, 3, 6], + ); + + // and a new export should include all metadata, regardless of import mode + + const reExporter = makeSwingStoreExporter(dbDir, 'current'); + const reExportData = new Map(); + for await (const [key, value] of reExporter.getExportData()) { + reExportData.set(key, value); + } + // console.log(reExportData); + + const check = (key, expected) => { + t.true(reExportData.has(key), `missing exportData ${key}`); + let value = reExportData.get(key); + reExportData.delete(key); + if (typeof expected === 'object') { + value 
= JSON.parse(value); + } + t.deepEqual(value, expected); + }; + + check('kv.key1', 'value1'); + check('snapshot.v1.2', { vatID: 'v1', snapPos: 2, inUse: 0, hash: snapHash }); + check('snapshot.v1.5', { vatID: 'v1', snapPos: 5, inUse: 1, hash: snapHash }); + check('snapshot.v1.current', 'snapshot.v1.5'); + const base = { vatID: 'v1', incarnation: 0, isCurrent: 0 }; + check('transcript.v1.0', { ...base, startPos: 0, endPos: 3, hash: t0hash }); + check('transcript.v1.3', { ...base, startPos: 3, endPos: 6, hash: t3hash }); + check('transcript.v1.current', { + ...base, + startPos: 6, + endPos: 8, + isCurrent: 1, + hash: t6hash, + }); + check(`bundle.${bundle0ID}`, bundle0ID); + + // the above list is supposed to be exhaustive + if (reExportData.size) { + console.log(reExportData); + t.fail('unexpected exportData keys'); + } +}); + +test('import current', importTest, 'current'); +test('import historical', importTest, 'historical'); + +test('import is missing bundle', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + + const exportData = new Map(); + exportData.set(`bundle.${bundle0ID}`, bundle0ID); + // but there is no artifact to match + const exporter = makeExporter(exportData, new Map()); + await t.throwsAsync(async () => importSwingStore(exporter, dbDir), { + message: /missing bundles for:/, + }); +}); + +test('import is missing snapshot', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + + const exportData = new Map(); + exportData.set( + `snapshot.v1.2`, + JSON.stringify({ vatID: 'v1', hash: snapHash, inUse: 1, snapPos: 2 }), + ); + // but there is no artifact to match + const exporter = makeExporter(exportData, new Map()); + await t.throwsAsync(async () => importSwingStore(exporter, dbDir), { + message: /current snapshots are pruned for vats/, + }); +}); + +test('import is missing transcript span', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + + 
const exportData = new Map(); + const t0hash = + '57152efdd7fdf75c03371d2b4f1088d5bf3eae7fe643babce527ff81df38998c'; + exportData.set( + `transcript.v1.current`, + JSON.stringify({ + vatID: 'v1', + startPos: 0, + endPos: 3, + hash: t0hash, + isCurrent: 1, + incarnation: 0, + }), + ); + // but there is no artifact to match + const exporter = makeExporter(exportData, new Map()); + await t.throwsAsync(async () => importSwingStore(exporter, dbDir), { + message: /incomplete current transcript/, + }); +}); + +test('import has mismatched transcript span', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + + const exportData = new Map(); + const t0hash = + '57152efdd7fdf75c03371d2b4f1088d5bf3eae7fe643babce527ff81df38998c'; + exportData.set( + `transcript.v1.current`, + JSON.stringify({ + vatID: 'v1', + startPos: 0, + endPos: 3, + hash: t0hash, + isCurrent: 0, // mismatch + incarnation: 0, + }), + ); + const exporter = makeExporter(exportData, new Map()); + await t.throwsAsync(async () => importSwingStore(exporter, dbDir), { + message: /transcript key "transcript.v1.current" mismatches metadata/, + }); +}); + +test('import has incomplete transcript span', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + + const exportData = new Map(); + const artifacts = new Map(); + const t0hash = + '57152efdd7fdf75c03371d2b4f1088d5bf3eae7fe643babce527ff81df38998c'; + exportData.set( + `transcript.v1.current`, + JSON.stringify({ + vatID: 'v1', + startPos: 0, + endPos: 4, // expect 4 items + hash: t0hash, + isCurrent: 1, + incarnation: 0, + }), + ); + // but artifact only contains 3 + artifacts.set( + `transcript.v1.0.4`, + 'start-worker\ndelivery1\nsave-snapshot\n', + ); + + const exporter = makeExporter(exportData, artifacts); + await t.throwsAsync(async () => importSwingStore(exporter, dbDir), { + message: /artifact "transcript.v1.0.4" is not complete/, + }); +}); + +test('import has corrupt transcript span', 
async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + + const exportData = new Map(); + const artifacts = new Map(); + const t0hash = + '57152efdd7fdf75c03371d2b4f1088d5bf3eae7fe643babce527ff81df38998c'; + exportData.set( + `transcript.v1.current`, + JSON.stringify({ + vatID: 'v1', + startPos: 0, + endPos: 3, + hash: t0hash, + isCurrent: 1, + incarnation: 0, + }), + ); + artifacts.set( + `transcript.v1.0.3`, + 'start-worker\nBAD-DELIVERY1\nsave-snapshot\n', + ); + + const exporter = makeExporter(exportData, artifacts); + await t.throwsAsync(async () => importSwingStore(exporter, dbDir), { + message: /artifact "transcript.v1.0.3" hash is.*metadata says/, + }); +}); + +test('import has corrupt snapshot', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + + const exportData = new Map(); + const artifacts = new Map(); + exportData.set( + `snapshot.v1.2`, + JSON.stringify({ + vatID: 'v1', + snapPos: 2, + hash: snapHash, + inUse: 1, + }), + ); + artifacts.set('snapshot.v1.2', `${snapshotData}WRONG`); + + const exporter = makeExporter(exportData, artifacts); + await t.throwsAsync(async () => importSwingStore(exporter, dbDir), { + message: /snapshot "snapshot.v1.2" hash is.*metadata says/, + }); +}); + +test('import has corrupt bundle', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + + const exportData = new Map(); + const artifacts = new Map(); + exportData.set(`bundle.${bundle0ID}`, bundle0ID); + const badBundle = { ...bundle0, source: 'WRONG' }; + artifacts.set(`bundle.${bundle0ID}`, JSON.stringify(badBundle)); + + const exporter = makeExporter(exportData, artifacts); + await t.throwsAsync(async () => importSwingStore(exporter, dbDir), { + message: /bundleID ".*" does not match bundle artifact/, + }); +}); + +test('import has unknown metadata tag', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + + const exportData = 
new Map(); + exportData.set(`unknown.v1.current`, 'value'); + const exporter = makeExporter(exportData, new Map()); + await t.throwsAsync(async () => importSwingStore(exporter, dbDir), { + message: /unknown export-data type "unknown" on import/, + }); +}); + +test('import has unknown artifact tag', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + + const artifacts = new Map(); + artifacts.set('unknown.v1.current', 'value'); + const exporter = makeExporter(new Map(), artifacts); + await t.throwsAsync(async () => importSwingStore(exporter, dbDir), { + message: /unknown artifact type "unknown" on import/, + }); +}); diff --git a/packages/swing-store/test/util.js b/packages/swing-store/test/util.js new file mode 100644 index 00000000000..615cd3af567 --- /dev/null +++ b/packages/swing-store/test/util.js @@ -0,0 +1,26 @@ +import { Buffer } from 'node:buffer'; +import tmp from 'tmp'; +import { createSHA256 } from '../src/hasher.js'; + +/** + * @param {string} [prefix] + * @returns {Promise<[string, () => void]>} + */ +export const tmpDir = prefix => + new Promise((resolve, reject) => { + tmp.dir({ unsafeCleanup: true, prefix }, (err, name, removeCallback) => { + if (err) { + reject(err); + } else { + resolve([name, removeCallback]); + } + }); + }); + +export async function* getSnapshotStream(contents) { + yield Buffer.from(contents); +} + +export function makeB0ID(bundle) { + return `b0-${createSHA256(JSON.stringify(bundle)).finish()}`; +} From 8cdffc6a383a51f9318012a33aaafe34eb42e1af Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Wed, 26 Jul 2023 19:19:13 -0600 Subject: [PATCH 077/109] fix(upgrade-test): abort if proposal is rejected --- .../upgrade-test/upgrade-test-scripts/env_setup.sh | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/env_setup.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/env_setup.sh index 
150dc4a1a66..feb055da6ec 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/env_setup.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/env_setup.sh @@ -202,12 +202,18 @@ voteLatestProposalAndWait() { while true; do status=$($binary q gov proposal $proposal -ojson | jq -r .status) - if [ "$status" == "PROPOSAL_STATUS_PASSED" ]; then + case $status in + PROPOSAL_STATUS_PASSED) break - else - echo "Waiting for proposal to pass" + ;; + PROPOSAL_STATUS_REJECTED) + echo "Proposal rejected" + exit 1 + ;; + *) + echo "Waiting for proposal to pass (status=$status)" sleep 1 - fi + esac done } From 5b2d19d1153a23c118afb14ca4ed80e175640f62 Mon Sep 17 00:00:00 2001 From: Brian Warner Date: Mon, 7 Aug 2023 18:21:52 -0700 Subject: [PATCH 078/109] feat(swingstore): add repairMetadata() Since bug #8025 caused swingstore imports to omit historical metadata, there may be deployed instances that are missing data. To fix this, the new `hostStorage.repairMetadata()` method takes an (intact) exporter, and will re-create any metadata records that don't already exist. This function ignores artifacts entirely. The method will throw if the metadata stream has records that already exist in the DB but whose contents do not match. It also throws if the stream creates a new bundle record, since without artifacts that new record cannot possibly be populated, and the data model requires all bundles be present. The method ignores any kvStore records (which are not really metadata). Afterwards, the caller is responsible for calling `hostStorage.commit()` when they are ready. 
refs #8025 --- packages/swing-store/src/bundleStore.js | 19 +++ packages/swing-store/src/repairMetadata.js | 65 +++++++++ packages/swing-store/src/snapStore.js | 29 ++++ packages/swing-store/src/swingStore.js | 7 + packages/swing-store/src/transcriptStore.js | 37 +++++ packages/swing-store/test/test-import.js | 4 +- .../swing-store/test/test-repair-metadata.js | 131 ++++++++++++++++++ 7 files changed, 290 insertions(+), 2 deletions(-) create mode 100644 packages/swing-store/src/repairMetadata.js create mode 100644 packages/swing-store/test/test-repair-metadata.js diff --git a/packages/swing-store/src/bundleStore.js b/packages/swing-store/src/bundleStore.js index de7d9b3471b..4b2de774ac3 100644 --- a/packages/swing-store/src/bundleStore.js +++ b/packages/swing-store/src/bundleStore.js @@ -26,6 +26,7 @@ import { createSHA256 } from './hasher.js'; * * @typedef {{ * exportBundle: (name: string) => AsyncIterableIterator, + * repairBundleRecord: (key: string, value: string) => void, * importBundleRecord: (key: string, value: string) => void, * importBundle: (name: string, dataProvider: () => Promise) => Promise, * assertComplete: (level: 'operational') => void, @@ -229,6 +230,23 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { addBundleRecord(bundleID); } + function repairBundleRecord(key, value) { + // Bundle records have no metadata, and all bundles must be + // present (there's no notion of "historical bundle"). So there's + // no "repair", and if the repair process supplies a bundle record + // that isn't already present, we throw an error. The repair + // process doesn't get artifacts, so adding a new record here + // would fail the subsequent completeness check anyways. 
+ + const bundleID = bundleIDFromName(key); + assert.equal(bundleID, value); + if (sqlHasBundleRecord.get(bundleID)) { + // record is present, there's no metadata to mismatch, so ignore quietly + return; + } + throw Fail`unexpected new bundle record for ${bundleID} during repair`; + } + /** * Read a bundle and return it as a stream of data suitable for export to * another store. @@ -361,6 +379,7 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { getArtifactNames, exportBundle, getBundleIDs, + repairBundleRecord, dumpBundles, }); diff --git a/packages/swing-store/src/repairMetadata.js b/packages/swing-store/src/repairMetadata.js new file mode 100644 index 00000000000..dc82b0c2f2f --- /dev/null +++ b/packages/swing-store/src/repairMetadata.js @@ -0,0 +1,65 @@ +import { Fail, q } from '@agoric/assert'; +import { assertComplete } from './assertComplete.js'; + +/** + * Given a pre-existing swingstore and a SwingStoreExporter, read in + * all the metadata from the exporter and use it to regenerate any + * missing metadata records. This can be used to fix the damage caused + * by #8025. + * + * The repair method will call `exporter.getExportData` and examine + * all entries to do one of three things: + * + * 1: kvStore records are ignored (they are not metadata) + * 2: bundle/snapshot/transcript records whose keys already exist will + * be compared against the existing data, and an error thrown if + * they do not match + * 3: new snapshot/transcript records will be silently added to + * the swingstore (new bundle records are an error, since we do not + * tolerate pruned bundles) + * + * It will not call `exporter.getArtifactNames` or `getArtifacts`. + * + * At the end of the process, the DB will contain pending changes in + * an open transaction. The caller is responsible for calling + * `hostStorage.commit()` when they are ready. 
+ * + * @param {import('./internal.js').SwingStoreInternal} internal + * @param {import('./exporter').SwingStoreExporter} exporter + * @returns {Promise} + */ +export async function doRepairMetadata(internal, exporter) { + // first we strip kvStore entries and deduplicate the rest + + const allMetadata = new Map(); + + for await (const [key, value] of exporter.getExportData()) { + const [tag] = key.split('.', 1); + if (tag === 'kv') { + continue; + } else if (value == null) { + allMetadata.delete(key); + } else { + allMetadata.set(key, value); + } + } + + // then process the metadata records + + for (const [key, value] of allMetadata.entries()) { + const [tag] = key.split('.', 1); + if (tag === 'bundle') { + internal.bundleStore.repairBundleRecord(key, value); + } else if (tag === 'snapshot') { + internal.snapStore.repairSnapshotRecord(key, value); + } else if (tag === 'transcript') { + internal.transcriptStore.repairTranscriptSpanRecord(key, value); + } else { + Fail`unknown export-data type in key ${q(key)} on repairMetadata`; + } + } + + // and do a completeness check + assertComplete(internal, 'operational'); + await exporter.close(); +} diff --git a/packages/swing-store/src/snapStore.js b/packages/swing-store/src/snapStore.js index 4cff5e2a6d7..0be13ee14c0 100644 --- a/packages/swing-store/src/snapStore.js +++ b/packages/swing-store/src/snapStore.js @@ -48,6 +48,7 @@ import { buffer } from './util.js'; * importSnapshotRecord: (key: string, value: string) => void, * populateSnapshot: (name: string, makeChunkIterator: () => AnyIterableIterator, options: { includeHistorical: boolean }) => Promise, * assertComplete: (level: 'operational') => void, + * repairSnapshotRecord: (key: string, value: string) => void, * }} SnapStoreInternal * * @typedef {{ @@ -526,6 +527,33 @@ export function makeSnapStore( WHERE vatID = ? AND snapPos = ? 
`); + function repairSnapshotRecord(key, value) { + ensureTxn(); + const [tag, keyVatID, keySnapPos] = key.split('.'); + assert.equal(tag, 'snapshot'); + if (keySnapPos === 'current') { + // "snapshot.${vatID}.current" entries are meta-metadata: they + // point to the metadata key of the current snapshot, to avoid + // the need for an expensive search + return; + } + const metadata = JSON.parse(value); + const { vatID, snapPos, hash, inUse } = metadata; + assert.equal(keyVatID, vatID); + assert.equal(Number(keySnapPos), snapPos); + const existing = sqlGetSnapshotHashFor.get(vatID, snapPos); + if (existing) { + if ( + Boolean(existing.inUse) !== Boolean(inUse) || + existing.hash !== hash + ) { + throw Fail`repairSnapshotRecord metadata mismatch: ${existing} vs ${metadata}`; + } + } else { + sqlAddSnapshotRecord.run(vatID, snapPos, hash, inUse ? 1 : null); + } + } + const sqlPopulateSnapshot = db.prepare(` UPDATE snapshots SET uncompressedSize = ?, compressedSize = ?, compressedSnapshot = ? 
@@ -665,6 +693,7 @@ export function makeSnapStore( importSnapshotRecord, populateSnapshot, assertComplete, + repairSnapshotRecord, hasHash, listAllSnapshots, diff --git a/packages/swing-store/src/swingStore.js b/packages/swing-store/src/swingStore.js index 4d7a254ab6f..368fb1c5a54 100644 --- a/packages/swing-store/src/swingStore.js +++ b/packages/swing-store/src/swingStore.js @@ -14,6 +14,7 @@ import { makeSnapStore } from './snapStore.js'; import { makeBundleStore } from './bundleStore.js'; import { createSHA256 } from './hasher.js'; import { makeSnapStoreIO } from './snapStoreIO.js'; +import { doRepairMetadata } from './repairMetadata.js'; /** * @typedef { import('./kvStore').KVStore } KVStore @@ -48,6 +49,7 @@ import { makeSnapStoreIO } from './snapStoreIO.js'; * close: () => Promise, // shutdown the store, abandoning any uncommitted changes * diskUsage?: () => number, // optional stats method * setExportCallback: (cb: (updates: KVPair[]) => void) => void, // Set a callback invoked by swingStore when new serializable data is available for export + * repairMetadata: (exporter: import('./exporter').SwingStoreExporter) => Promise, * }} SwingStoreHostStorage */ @@ -477,6 +479,10 @@ export function makeSwingStore(dirPath, forceReset, options = {}) { /** @type {import('./internal.js').SwingStoreInternal} */ const internal = harden({ snapStore, transcriptStore, bundleStore }); + async function repairMetadata(exporter) { + return doRepairMetadata(internal, exporter); + } + /** * Return a Buffer with the entire DB state, useful for cloning a * small swingstore in unit tests. 
@@ -548,6 +554,7 @@ export function makeSwingStore(dirPath, forceReset, options = {}) { getActivityhash, }; const hostStorage = { + repairMetadata, kvStore: hostKVStore, commit, close, diff --git a/packages/swing-store/src/transcriptStore.js b/packages/swing-store/src/transcriptStore.js index b0ffcb605cd..3ff700d0c0f 100644 --- a/packages/swing-store/src/transcriptStore.js +++ b/packages/swing-store/src/transcriptStore.js @@ -28,6 +28,7 @@ import { createSHA256 } from './hasher.js'; * importTranscriptSpanRecord: (key: string, value: string) => void, * populateTranscriptSpan: (name: string, makeChunkIterator: () => AnyIterableIterator, options: { includeHistorical: boolean }) => Promise, * assertComplete: (level: 'operational') => void, + * repairTranscriptSpanRecord: (key: string, value: string) => void, * readFullVatTranscript: (vatID: string) => Iterable<{position: number, item: string}> * }} TranscriptStoreInternal * @@ -643,6 +644,41 @@ export function makeTranscriptStore( // commit. So we're done. } + function repairTranscriptSpanRecord(key, value) { + ensureTxn(); + const [tag, keyVatID, keyStartPos] = key.split('.'); + assert.equal(tag, 'transcript'); + const metadata = JSON.parse(value); + const { vatID, startPos, endPos, hash, isCurrent, incarnation } = metadata; + assert.equal(keyVatID, vatID); + if (keyStartPos !== 'current') { + if (Number(keyStartPos) !== startPos) { + Fail`transcript key ${key} mismatches metadata ${metadata}`; + } + } + + const existing = sqlGetSpanMetadataFor.get(vatID, startPos); + if (existing) { + if ( + Boolean(existing.isCurrent) !== Boolean(isCurrent) || + existing.hash !== hash || + existing.incarnation !== incarnation || + existing.endPos !== endPos + ) { + throw Fail`repairTranscriptSpanRecord metadata mismatch: ${existing} vs ${metadata}`; + } + } else { + sqlWriteSpan.run( + vatID, + startPos, + endPos, + hash, + isCurrent ? 
1 : null, + incarnation, + ); + } + } + function assertComplete(level) { assert.equal(level, 'operational'); // for now // every 'isCurrent' transcript span must have all items @@ -676,6 +712,7 @@ export function makeTranscriptStore( importTranscriptSpanRecord, populateTranscriptSpan, assertComplete, + repairTranscriptSpanRecord, dumpTranscripts, readFullVatTranscript, diff --git a/packages/swing-store/test/test-import.js b/packages/swing-store/test/test-import.js index 040ea476695..ccdfde23446 100644 --- a/packages/swing-store/test/test-import.js +++ b/packages/swing-store/test/test-import.js @@ -45,7 +45,7 @@ function convert(orig) { * @param { Map } exportData * @param { Map } artifacts */ -function makeExporter(exportData, artifacts) { +export function makeExporter(exportData, artifacts) { return { async *getExportData() { for (const [key, value] of exportData.entries()) { @@ -84,7 +84,7 @@ test('import empty', async t => { }); }); -function buildData() { +export function buildData() { // build an export manually const exportData = new Map(); const artifacts = new Map(); diff --git a/packages/swing-store/test/test-repair-metadata.js b/packages/swing-store/test/test-repair-metadata.js new file mode 100644 index 00000000000..1629c9fdcc8 --- /dev/null +++ b/packages/swing-store/test/test-repair-metadata.js @@ -0,0 +1,131 @@ +// @ts-check + +import '@endo/init/debug.js'; + +import path from 'path'; +import test from 'ava'; +import sqlite3 from 'better-sqlite3'; + +import { importSwingStore } from '../src/index.js'; + +import { makeExporter, buildData } from './test-import.js'; +import { tmpDir } from './util.js'; + +test('repair metadata', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + + const { exportData, artifacts } = buildData(); + + // simulate a swingstore broken by #8025 by importing everything, + // then manually deleting the historical metadata entries from the + // DB + const exporter = makeExporter(exportData, 
artifacts); + const ss = await importSwingStore(exporter, dbDir); + await ss.hostStorage.commit(); + + const filePath = path.join(dbDir, 'swingstore.sqlite'); + const db = sqlite3(filePath); + + const getTS = db.prepare( + 'SELECT startPos FROM transcriptSpans WHERE vatID = ? ORDER BY startPos', + ); + getTS.pluck(); + const getSS = db.prepare( + 'SELECT snapPos FROM snapshots WHERE vatID = ? ORDER BY snapPos', + ); + getSS.pluck(); + + // assert that all the metadata is there at first + const ts1 = getTS.all('v1'); + t.deepEqual(ts1, [0, 3, 6]); // three spans + const ss1 = getSS.all('v1'); + t.deepEqual(ss1, [2, 5]); // two snapshots + + // now clobber them to simulate #8025 (note: these auto-commit) + db.prepare('DELETE FROM transcriptSpans WHERE isCurrent IS NULL').run(); + db.prepare('DELETE FROM snapshots WHERE inUSE IS NULL').run(); + + // confirm that we clobbered them + const ts2 = getTS.all('v1'); + t.deepEqual(ts2, [6]); // only the latest + const ss2 = getSS.all('v1'); + t.deepEqual(ss2, [5]); + + // now fix it + await ss.hostStorage.repairMetadata(exporter); + await ss.hostStorage.commit(); + + // and check that the metadata is back + const ts3 = getTS.all('v1'); + t.deepEqual(ts3, [0, 3, 6]); // all three again + const ss3 = getSS.all('v1'); + t.deepEqual(ss3, [2, 5]); + + // repair should be idempotent + await ss.hostStorage.repairMetadata(exporter); + + const ts4 = getTS.all('v1'); + t.deepEqual(ts4, [0, 3, 6]); // still there + const ss4 = getSS.all('v1'); + t.deepEqual(ss4, [2, 5]); +}); + +test('repair metadata ignores kvStore entries', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + + const { exportData, artifacts } = buildData(); + + const exporter = makeExporter(exportData, artifacts); + const ss = await importSwingStore(exporter, dbDir); + await ss.hostStorage.commit(); + + // perform the repair with spurious kv entries + exportData.set('kv.key2', 'value2'); + await 
ss.hostStorage.repairMetadata(exporter); + await ss.hostStorage.commit(); + + // the spurious kv entry should be ignored + t.deepEqual(ss.debug.dump().kvEntries, { key1: 'value1' }); +}); + +test('repair metadata rejects mismatched snapshot entries', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + + const { exportData, artifacts } = buildData(); + + const exporter = makeExporter(exportData, artifacts); + const ss = await importSwingStore(exporter, dbDir); + await ss.hostStorage.commit(); + + // perform the repair with mismatched snapshot entry + const old = JSON.parse(exportData.get('snapshot.v1.2')); + const wrong = { ...old, hash: 'wrong' }; + exportData.set('snapshot.v1.2', JSON.stringify(wrong)); + + await t.throwsAsync(async () => ss.hostStorage.repairMetadata(exporter), { + message: /repairSnapshotRecord metadata mismatch/, + }); +}); + +test('repair metadata rejects mismatched transcript span', async t => { + const [dbDir, cleanup] = await tmpDir('testdb'); + t.teardown(cleanup); + + const { exportData, artifacts } = buildData(); + + const exporter = makeExporter(exportData, artifacts); + const ss = await importSwingStore(exporter, dbDir); + await ss.hostStorage.commit(); + + // perform the repair with mismatched transcript span entry + const old = JSON.parse(exportData.get('transcript.v1.0')); + const wrong = { ...old, hash: 'wrong' }; + exportData.set('transcript.v1.0', JSON.stringify(wrong)); + + await t.throwsAsync(async () => ss.hostStorage.repairMetadata(exporter), { + message: /repairTranscriptSpanRecord metadata mismatch/, + }); +}); From 28db1dc54b72c50697492fa145a85dc2adae10a7 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Fri, 28 Jul 2023 23:31:57 -0600 Subject: [PATCH 079/109] test(upgrade-test): default init-network core proposal for agoric-upgrade-11 --- packages/deployment/upgrade-test/Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git 
a/packages/deployment/upgrade-test/Dockerfile b/packages/deployment/upgrade-test/Dockerfile index d57e2402322..b428e25c87b 100644 --- a/packages/deployment/upgrade-test/Dockerfile +++ b/packages/deployment/upgrade-test/Dockerfile @@ -74,7 +74,9 @@ ARG DEST_IMAGE #this is agoric-upgrade-10 upgrading to 11 #it's a separate target because agoric-upgrade-10 takes so long to test FROM ghcr.io/agoric/agoric-sdk:34 as agoric-upgrade-10-to-11 -ARG BOOTSTRAP_MODE UPGRADE_INFO_11 +# This default UPGRADE_INFO_11 is to test core proposals like the network vat. +# TODO: Maybe replace with a Zoe core proposal, or remove when other paths test it. +ARG BOOTSTRAP_MODE UPGRADE_INFO_11='{"coreProposals":["@agoric/vats/scripts/init-network.js"]}' ENV THIS_NAME=agoric-upgrade-10-to-11 UPGRADE_TO=agoric-upgrade-11 UPGRADE_INFO=${UPGRADE_INFO_11} BOOTSTRAP_MODE=${BOOTSTRAP_MODE} WORKDIR /usr/src/agoric-sdk/ From 981fa019a22bdbd8c512f2ed6b80941e5b8d11b6 Mon Sep 17 00:00:00 2001 From: Raphael Salas Date: Tue, 8 Aug 2023 14:14:54 -0400 Subject: [PATCH 080/109] fix(upgrade-test): use correct mainnet-1b image --- packages/deployment/upgrade-test/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/deployment/upgrade-test/Dockerfile b/packages/deployment/upgrade-test/Dockerfile index b00ccf38aac..34d01cabd0f 100644 --- a/packages/deployment/upgrade-test/Dockerfile +++ b/packages/deployment/upgrade-test/Dockerfile @@ -57,7 +57,7 @@ RUN . 
./upgrade-test-scripts/start_to_to.sh ARG DEST_IMAGE #this is agoric-upgrade-10 / vaults -FROM ghcr.io/agoric/agoric-sdk:34 as agoric-upgrade-10 +FROM ghcr.io/agoric/agoric-sdk:35 as agoric-upgrade-10 ARG BOOTSTRAP_MODE ENV THIS_NAME=agoric-upgrade-10 UPGRADE_TO=agoric-upgrade-11 BOOTSTRAP_MODE=${BOOTSTRAP_MODE} From 47a6b70c82997e91cdba88f50f5bd61e17477bbf Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Thu, 3 Aug 2023 12:02:31 -0600 Subject: [PATCH 081/109] fix(upgrade-test): use `agoric-upgrade-10` tag 35 --- packages/deployment/upgrade-test/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/deployment/upgrade-test/Dockerfile b/packages/deployment/upgrade-test/Dockerfile index b428e25c87b..6ef270a54e3 100644 --- a/packages/deployment/upgrade-test/Dockerfile +++ b/packages/deployment/upgrade-test/Dockerfile @@ -73,7 +73,7 @@ RUN . ./upgrade-test-scripts/start_to_to.sh ARG DEST_IMAGE #this is agoric-upgrade-10 upgrading to 11 #it's a separate target because agoric-upgrade-10 takes so long to test -FROM ghcr.io/agoric/agoric-sdk:34 as agoric-upgrade-10-to-11 +FROM ghcr.io/agoric/agoric-sdk:35 as agoric-upgrade-10-to-11 # This default UPGRADE_INFO_11 is to test core proposals like the network vat. # TODO: Maybe replace with a Zoe core proposal, or remove when other paths test it. 
ARG BOOTSTRAP_MODE UPGRADE_INFO_11='{"coreProposals":["@agoric/vats/scripts/init-network.js"]}' From a4fe29080169cc1bc4295e713c08a37343cd8b49 Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Tue, 8 Aug 2023 16:37:36 -0600 Subject: [PATCH 082/109] fix(upgrade-test): use default `$DEBUG` for docker run --- packages/deployment/upgrade-test/Makefile | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/deployment/upgrade-test/Makefile b/packages/deployment/upgrade-test/Makefile index cd9c24bca50..8514195248f 100644 --- a/packages/deployment/upgrade-test/Makefile +++ b/packages/deployment/upgrade-test/Makefile @@ -46,7 +46,15 @@ build: $(TARGET) build_test: BOOTSTRAP_MODE=test build_test: $(TARGET) +DEBUG ?= SwingSet:ls,SwingSet:vat +RUN = docker run --rm -it \ + -p 26656:26656 -p 26657:26657 -p 1317:1317 \ + -v "$${PWD}:/workspace" \ + -e "DEST=1" -e "DEBUG=$(DEBUG)" + run: - docker run --rm -it -e "DEST=1" -e "TMUX_USE_CC=$(tmuxCC)" -p 26656:26656 -p 26657:26657 -p 1317:1317 --entrypoint "/usr/src/agoric-sdk/upgrade-test-scripts/start_to_to.sh" -v "$${PWD}:/workspace" $(REPOSITORY):$(dockerLabel) + $(RUN) -e "TMUX_USE_CC=$(tmuxCC)" \ + --entrypoint /usr/src/agoric-sdk/upgrade-test-scripts/start_to_to.sh \ + $(REPOSITORY):$(dockerLabel) .PHONY: local_sdk agoric-upgrade-7-2 agoric-upgrade-8 agoric-upgrade-8-1 agoric-upgrade-9 agoric-upgrade-10 agoric-upgrade-11 build build_test run From f71a9c953d3f1e5317c44f779cf21de7430107ac Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 11 Aug 2023 20:16:54 +0000 Subject: [PATCH 083/109] refactor(x/swingset): ReadArtifact -> ReadNextArtifact --- docs/architecture/state-sync.md | 4 ++-- .../x/swingset/keeper/extension_snapshotter.go | 6 +++--- .../x/swingset/keeper/swing_store_exports_handler.go | 12 ++++++------ .../keeper/swing_store_exports_handler_test.go | 2 +- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/docs/architecture/state-sync.md b/docs/architecture/state-sync.md 
index 3f7b263ea5b..aa9f64f9fe4 100644 --- a/docs/architecture/state-sync.md +++ b/docs/architecture/state-sync.md @@ -118,7 +118,7 @@ sequenceDiagram D-CS-->>-SSEH-CS: SSEH-CS->>+SSES-CS: OnExportRetrieved() loop - SSES-CS->>+SSEH-CS: provider.ReadArtifact() + SSES-CS->>+SSEH-CS: provider.ReadNextArtifact() SSEH-CS->>+D-CS: Read(artifactFile) D-CS-->>-SSEH-CS: SSEH-CS-->>-SSES-CS: artifact{name, data} @@ -255,7 +255,7 @@ sequenceDiagram D-CS-->>-SSEH-CS: end loop extension snapshot items - SSEH-CS->>+SSES-CS: provider.readArtifact() + SSEH-CS->>+SSES-CS: provider.ReadNextArtifact() SSES-CS->>+SM-CS: payloadReader() SM-CS->>+SM-M: chunk = <-chunks SM-M-->>-SM-CS: diff --git a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go index e6f5e28d666..14ffc538872 100644 --- a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go @@ -235,7 +235,7 @@ func (snapshotter *ExtensionSnapshotter) OnExportRetrieved(provider SwingStoreEx } for { - artifact, err := provider.ReadArtifact() + artifact, err := provider.ReadNextArtifact() if err == io.EOF { break } else if err != nil { @@ -304,7 +304,7 @@ func (snapshotter *ExtensionSnapshotter) RestoreExtension(blockHeight uint64, fo return exportData, nil } - readArtifact := func() (artifact types.SwingStoreArtifact, err error) { + readNextArtifact := func() (artifact types.SwingStoreArtifact, err error) { payloadBytes, err := payloadReader() if err != nil { return artifact, err @@ -315,7 +315,7 @@ func (snapshotter *ExtensionSnapshotter) RestoreExtension(blockHeight uint64, fo } return snapshotter.swingStoreExportsHandler.RestoreExport( - SwingStoreExportProvider{BlockHeight: blockHeight, GetExportData: getExportData, ReadArtifact: readArtifact}, + SwingStoreExportProvider{BlockHeight: blockHeight, GetExportData: getExportData, ReadNextArtifact: readNextArtifact}, 
SwingStoreRestoreOptions{IncludeHistorical: false}, ) } diff --git a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go index a01bcf35ff3..4b558b09df9 100644 --- a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go +++ b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go @@ -366,9 +366,9 @@ type SwingStoreExportProvider struct { BlockHeight uint64 // GetExportData is a function to return the "export data" of the SwingStore export, if any. GetExportData func() ([]*vstoragetypes.DataEntry, error) - // ReadArtifact is a function to return the next unread artifact in the SwingStore export. - // It errors with io.EOF upon reaching the end of the artifact list. - ReadArtifact func() (types.SwingStoreArtifact, error) + // ReadNextArtifact is a function to return the next unread artifact in the SwingStore export. + // It errors with io.EOF upon reaching the end of the list of available artifacts. 
+ ReadNextArtifact func() (types.SwingStoreArtifact, error) } // SwingStoreExportEventHandler is used to handle events that occur while generating @@ -649,7 +649,7 @@ func (exportsHandler SwingStoreExportsHandler) retrieveExport(onExportRetrieved nextArtifact := 0 - readArtifact := func() (artifact types.SwingStoreArtifact, err error) { + readNextArtifact := func() (artifact types.SwingStoreArtifact, err error) { if nextArtifact == len(manifest.Artifacts) { return artifact, io.EOF } else if nextArtifact > len(manifest.Artifacts) { @@ -670,7 +670,7 @@ func (exportsHandler SwingStoreExportsHandler) retrieveExport(onExportRetrieved return artifact, err } - err = onExportRetrieved(SwingStoreExportProvider{BlockHeight: manifest.BlockHeight, GetExportData: getExportData, ReadArtifact: readArtifact}) + err = onExportRetrieved(SwingStoreExportProvider{BlockHeight: manifest.BlockHeight, GetExportData: getExportData, ReadNextArtifact: readNextArtifact}) if err != nil { return err } @@ -758,7 +758,7 @@ func (exportsHandler SwingStoreExportsHandler) RestoreExport(provider SwingStore } for { - artifact, err := provider.ReadArtifact() + artifact, err := provider.ReadNextArtifact() if err == io.EOF { break } else if err != nil { diff --git a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler_test.go b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler_test.go index 7396c501157..c13951c9414 100644 --- a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler_test.go +++ b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler_test.go @@ -31,7 +31,7 @@ func newTestSwingStoreEventHandler() testSwingStoreEventHandler { }, onExportRetrieved: func(provider SwingStoreExportProvider) error { for { - _, err := provider.ReadArtifact() + _, err := provider.ReadNextArtifact() if err == io.EOF { return nil } else if err != nil { From 3bd1004d8407655b272525ce5afff1fb2b6ccf1d Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 11 Aug 2023 20:55:04 +0000 Subject: 
[PATCH 084/109] refactor(x/swingset): create restore context in app --- golang/cosmos/app/app.go | 9 ++++++++- .../x/swingset/keeper/extension_snapshotter.go | 17 +++++------------ .../keeper/extension_snapshotter_test.go | 2 -- 3 files changed, 13 insertions(+), 15 deletions(-) diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index 44d0d7bda39..76208f7c976 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -102,6 +102,7 @@ import ( tmjson "github.com/tendermint/tendermint/libs/json" "github.com/tendermint/tendermint/libs/log" tmos "github.com/tendermint/tendermint/libs/os" + tmproto "github.com/tendermint/tendermint/proto/tendermint/types" dbm "github.com/tendermint/tm-db" gaiaappparams "github.com/Agoric/agoric-sdk/golang/cosmos/app/params" @@ -117,6 +118,7 @@ import ( vbanktypes "github.com/Agoric/agoric-sdk/golang/cosmos/x/vbank/types" "github.com/Agoric/agoric-sdk/golang/cosmos/x/vibc" "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage" + vstoragetypes "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types" // unnamed import of statik for swagger UI support _ "github.com/cosmos/cosmos-sdk/client/docs/statik" @@ -472,10 +474,15 @@ func NewAgoricApp( return sendToController(true, string(bz)) }, ) + + getSwingStoreExportDataShadowCopy := func(height int64) []*vstoragetypes.DataEntry { + ctx := app.NewUncachedContext(false, tmproto.Header{Height: height}) + return app.SwingSetKeeper.ExportSwingStore(ctx) + } app.SwingSetSnapshotter = *swingsetkeeper.NewExtensionSnapshotter( bApp, &app.SwingStoreExportsHandler, - app.SwingSetKeeper.ExportSwingStore, + getSwingStoreExportDataShadowCopy, ) app.VibcKeeper = vibc.NewKeeper( diff --git a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go index 14ffc538872..920c3a3228e 100644 --- a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go @@ -12,9 
+12,7 @@ import ( vstoragetypes "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types" "github.com/cosmos/cosmos-sdk/baseapp" snapshots "github.com/cosmos/cosmos-sdk/snapshots/types" - sdk "github.com/cosmos/cosmos-sdk/types" "github.com/tendermint/tendermint/libs/log" - tmproto "github.com/tendermint/tendermint/proto/tendermint/types" ) // This module implements a Cosmos ExtensionSnapshotter to capture and restore @@ -68,9 +66,8 @@ type ExtensionSnapshotter struct { isConfigured func() bool // takeAppSnapshot is called by OnExportStarted when creating a snapshot takeAppSnapshot func(height int64) - newRestoreContext func(height int64) sdk.Context swingStoreExportsHandler *SwingStoreExportsHandler - getSwingStoreExportDataShadowCopy func(ctx sdk.Context) []*vstoragetypes.DataEntry + getSwingStoreExportDataShadowCopy func(height int64) []*vstoragetypes.DataEntry logger log.Logger activeSnapshot *snapshotDetails } @@ -79,14 +76,11 @@ type ExtensionSnapshotter struct { func NewExtensionSnapshotter( app *baseapp.BaseApp, swingStoreExportsHandler *SwingStoreExportsHandler, - getSwingStoreExportDataShadowCopy func(ctx sdk.Context) []*vstoragetypes.DataEntry, + getSwingStoreExportDataShadowCopy func(height int64) []*vstoragetypes.DataEntry, ) *ExtensionSnapshotter { return &ExtensionSnapshotter{ - isConfigured: func() bool { return app.SnapshotManager() != nil }, - takeAppSnapshot: app.Snapshot, - newRestoreContext: func(height int64) sdk.Context { - return app.NewUncachedContext(false, tmproto.Header{Height: height}) - }, + isConfigured: func() bool { return app.SnapshotManager() != nil }, + takeAppSnapshot: app.Snapshot, logger: app.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName), "submodule", "extension snapshotter"), swingStoreExportsHandler: swingStoreExportsHandler, getSwingStoreExportDataShadowCopy: getSwingStoreExportDataShadowCopy, @@ -299,8 +293,7 @@ func (snapshotter *ExtensionSnapshotter) RestoreExtension(blockHeight uint64, fo // AppHash, 
which means the SwingStore data it contains can be used as the // trusted root against which to validate the artifacts. getExportData := func() ([]*vstoragetypes.DataEntry, error) { - ctx := snapshotter.newRestoreContext(height) - exportData := snapshotter.getSwingStoreExportDataShadowCopy(ctx) + exportData := snapshotter.getSwingStoreExportDataShadowCopy(height) return exportData, nil } diff --git a/golang/cosmos/x/swingset/keeper/extension_snapshotter_test.go b/golang/cosmos/x/swingset/keeper/extension_snapshotter_test.go index 85440591c4f..2f20b1662f1 100644 --- a/golang/cosmos/x/swingset/keeper/extension_snapshotter_test.go +++ b/golang/cosmos/x/swingset/keeper/extension_snapshotter_test.go @@ -4,7 +4,6 @@ import ( "io" "testing" - sdk "github.com/cosmos/cosmos-sdk/types" "github.com/tendermint/tendermint/libs/log" ) @@ -12,7 +11,6 @@ func newTestExtensionSnapshotter() *ExtensionSnapshotter { logger := log.NewNopLogger() // log.NewTMLogger(log.NewSyncWriter( /* os.Stdout*/ io.Discard)).With("module", "sdk/app") return &ExtensionSnapshotter{ isConfigured: func() bool { return true }, - newRestoreContext: func(height int64) sdk.Context { return sdk.Context{} }, logger: logger, swingStoreExportsHandler: newTestSwingStoreExportsHandler(), } From c5c8d8cfcba8f7b51bb23ee3d758c9903d677af3 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Tue, 8 Aug 2023 20:17:09 +0000 Subject: [PATCH 085/109] refactor(cosmos): shared KVEntry --- golang/cosmos/types/kv_entry.go | 67 +++++++++++++++++++ golang/cosmos/x/swingset/keeper/keeper.go | 11 +-- golang/cosmos/x/swingset/keeper/querier.go | 2 +- golang/cosmos/x/vstorage/keeper/keeper.go | 38 +++++------ .../cosmos/x/vstorage/keeper/keeper_test.go | 41 ++++++------ golang/cosmos/x/vstorage/keeper/querier.go | 2 +- golang/cosmos/x/vstorage/types/types.go | 66 ------------------ golang/cosmos/x/vstorage/vstorage.go | 28 ++++---- golang/cosmos/x/vstorage/vstorage_test.go | 28 ++++---- 9 files changed, 143 insertions(+), 140 
deletions(-) create mode 100644 golang/cosmos/types/kv_entry.go diff --git a/golang/cosmos/types/kv_entry.go b/golang/cosmos/types/kv_entry.go new file mode 100644 index 00000000000..7be89a8b843 --- /dev/null +++ b/golang/cosmos/types/kv_entry.go @@ -0,0 +1,67 @@ +package types + +import ( + "encoding/json" + "fmt" +) + +type KVEntry struct { + key string + value *string +} + +func NewKVEntry(key string, value string) KVEntry { + return KVEntry{key, &value} +} + +func NewKVEntryWithNoValue(key string) KVEntry { + return KVEntry{key, nil} +} + +// UnmarshalKVEntry interprets its argument as a [key: string, value?: string | null] +// JSON array and returns a corresponding KVEntry. +// The key must be a string, and the value (if present) must be a string or null. +func UnmarshalKVEntry(msg json.RawMessage) (entry KVEntry, err error) { + var generic [2]interface{} + err = json.Unmarshal(msg, &generic) + + if err != nil { + return + } + + key, ok := generic[0].(string) + if !ok { + err = fmt.Errorf("invalid entry key: %q", generic[0]) + return + } + + switch generic[1].(type) { + case string: + entry = NewKVEntry(key, generic[1].(string)) + case nil: + entry = NewKVEntryWithNoValue(key) + default: + err = fmt.Errorf("invalid entry value: %q", generic[1]) + } + return +} + +func (entry KVEntry) HasValue() bool { + return entry.value != nil +} + +func (entry KVEntry) Key() string { + return entry.key +} + +func (entry KVEntry) Value() *string { + return entry.value +} + +func (entry KVEntry) StringValue() string { + if entry.value != nil { + return *entry.value + } else { + return "" + } +} diff --git a/golang/cosmos/x/swingset/keeper/keeper.go b/golang/cosmos/x/swingset/keeper/keeper.go index 2640e7176e0..00f4191a8dd 100644 --- a/golang/cosmos/x/swingset/keeper/keeper.go +++ b/golang/cosmos/x/swingset/keeper/keeper.go @@ -16,6 +16,7 @@ import ( paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" "github.com/Agoric/agoric-sdk/golang/cosmos/ante" + agoric 
"github.com/Agoric/agoric-sdk/golang/cosmos/types" "github.com/Agoric/agoric-sdk/golang/cosmos/vm" "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types" vstoragekeeper "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/keeper" @@ -261,7 +262,7 @@ func getBeansOwingPathForAddress(addr sdk.AccAddress) string { func (k Keeper) GetBeansOwing(ctx sdk.Context, addr sdk.AccAddress) sdk.Uint { path := getBeansOwingPathForAddress(addr) entry := k.vstorageKeeper.GetEntry(ctx, path) - if !entry.HasData() { + if !entry.HasValue() { return sdk.ZeroUint() } return sdk.NewUintFromString(entry.StringValue()) @@ -271,7 +272,7 @@ func (k Keeper) GetBeansOwing(ctx sdk.Context, addr sdk.AccAddress) sdk.Uint { // feeCollector but has not yet paid. func (k Keeper) SetBeansOwing(ctx sdk.Context, addr sdk.AccAddress, beans sdk.Uint) { path := getBeansOwingPathForAddress(addr) - k.vstorageKeeper.SetStorage(ctx, vstoragetypes.NewStorageEntry(path, beans.String())) + k.vstorageKeeper.SetStorage(ctx, agoric.NewKVEntry(path, beans.String())) } // ChargeBeans charges the given address the given number of beans. It divides @@ -375,7 +376,7 @@ func (k Keeper) ChargeForProvisioning(ctx sdk.Context, submitter, addr sdk.AccAd func (k Keeper) GetEgress(ctx sdk.Context, addr sdk.AccAddress) types.Egress { path := StoragePathEgress + "." + addr.String() entry := k.vstorageKeeper.GetEntry(ctx, path) - if !entry.HasData() { + if !entry.HasValue() { return types.Egress{} } @@ -398,7 +399,7 @@ func (k Keeper) SetEgress(ctx sdk.Context, egress *types.Egress) error { } // FIXME: We should use just SetStorageAndNotify here, but solo needs legacy for now. - k.vstorageKeeper.LegacySetStorageAndNotify(ctx, vstoragetypes.NewStorageEntry(path, string(bz))) + k.vstorageKeeper.LegacySetStorageAndNotify(ctx, agoric.NewKVEntry(path, string(bz))) // Now make sure the corresponding account has been initialised. 
if acc := k.accountKeeper.GetAccount(ctx, egress.Peer); acc != nil { @@ -431,7 +432,7 @@ func (k Keeper) GetMailbox(ctx sdk.Context, peer string) string { func (k Keeper) SetMailbox(ctx sdk.Context, peer string, mailbox string) { path := StoragePathMailbox + "." + peer // FIXME: We should use just SetStorageAndNotify here, but solo needs legacy for now. - k.vstorageKeeper.LegacySetStorageAndNotify(ctx, vstoragetypes.NewStorageEntry(path, mailbox)) + k.vstorageKeeper.LegacySetStorageAndNotify(ctx, agoric.NewKVEntry(path, mailbox)) } func (k Keeper) ExportSwingStore(ctx sdk.Context) []*vstoragetypes.DataEntry { diff --git a/golang/cosmos/x/swingset/keeper/querier.go b/golang/cosmos/x/swingset/keeper/querier.go index ca678950371..3195b40885b 100644 --- a/golang/cosmos/x/swingset/keeper/querier.go +++ b/golang/cosmos/x/swingset/keeper/querier.go @@ -80,7 +80,7 @@ func queryMailbox(ctx sdk.Context, path []string, req abci.RequestQuery, keeper // nolint: unparam func legacyQueryStorage(ctx sdk.Context, path string, req abci.RequestQuery, keeper Keeper, legacyQuerierCdc *codec.LegacyAmino) (res []byte, err error) { entry := keeper.vstorageKeeper.GetEntry(ctx, path) - if !entry.HasData() { + if !entry.HasValue() { return []byte{}, sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "could not get swingset %+v", path) } diff --git a/golang/cosmos/x/vstorage/keeper/keeper.go b/golang/cosmos/x/vstorage/keeper/keeper.go index 9a91e793b10..cc0e9d3298c 100644 --- a/golang/cosmos/x/vstorage/keeper/keeper.go +++ b/golang/cosmos/x/vstorage/keeper/keeper.go @@ -35,7 +35,7 @@ type ProposedChange struct { } type ChangeManager interface { - Track(ctx sdk.Context, k Keeper, entry types.StorageEntry, isLegacy bool) + Track(ctx sdk.Context, k Keeper, entry agoric.KVEntry, isLegacy bool) EmitEvents(ctx sdk.Context, k Keeper) Rollback(ctx sdk.Context) } @@ -65,8 +65,8 @@ type Keeper struct { storeKey sdk.StoreKey } -func (bcm *BatchingChangeManager) Track(ctx sdk.Context, k Keeper, entry 
types.StorageEntry, isLegacy bool) { - path := entry.Path() +func (bcm *BatchingChangeManager) Track(ctx sdk.Context, k Keeper, entry agoric.KVEntry, isLegacy bool) { + path := entry.Key() // TODO: differentiate between deletion and setting empty string? // Using empty string for deletion for backwards compatibility value := entry.StringValue() @@ -177,7 +177,7 @@ func (k Keeper) ImportStorage(ctx sdk.Context, entries []*types.DataEntry) { for _, entry := range entries { // This set does the bookkeeping for us in case the entries aren't a // complete tree. - k.SetStorage(ctx, types.NewStorageEntry(entry.Path, entry.Value)) + k.SetStorage(ctx, agoric.NewKVEntry(entry.Path, entry.Value)) } } @@ -205,22 +205,22 @@ func (k Keeper) EmitChange(ctx sdk.Context, change *ProposedChange) { } // GetEntry gets generic storage. The default value is an empty string. -func (k Keeper) GetEntry(ctx sdk.Context, path string) types.StorageEntry { +func (k Keeper) GetEntry(ctx sdk.Context, path string) agoric.KVEntry { //fmt.Printf("GetEntry(%s)\n", path); store := ctx.KVStore(k.storeKey) encodedKey := types.PathToEncodedKey(path) rawValue := store.Get(encodedKey) if len(rawValue) == 0 { - return types.NewStorageEntryWithNoData(path) + return agoric.NewKVEntryWithNoValue(path) } if bytes.Equal(rawValue, types.EncodedNoDataValue) { - return types.NewStorageEntryWithNoData(path) + return agoric.NewKVEntryWithNoValue(path) } value, hasPrefix := cutPrefix(rawValue, types.EncodedDataPrefix) if !hasPrefix { panic(fmt.Errorf("value at path %q starts with unexpected prefix", path)) } - return types.NewStorageEntry(path, string(value)) + return agoric.NewKVEntry(path, string(value)) } func (k Keeper) getKeyIterator(ctx sdk.Context, path string) db.Iterator { @@ -249,7 +249,7 @@ func (k Keeper) GetChildren(ctx sdk.Context, path string) *types.Children { // (just an empty string) and exist only to provide linkage to subnodes with // data. 
func (k Keeper) HasStorage(ctx sdk.Context, path string) bool { - return k.GetEntry(ctx, path).HasData() + return k.GetEntry(ctx, path).HasValue() } // HasEntry tells if a given path has either subnodes or data. @@ -278,12 +278,12 @@ func (k Keeper) FlushChangeEvents(ctx sdk.Context) { k.changeManager.Rollback(ctx) } -func (k Keeper) SetStorageAndNotify(ctx sdk.Context, entry types.StorageEntry) { +func (k Keeper) SetStorageAndNotify(ctx sdk.Context, entry agoric.KVEntry) { k.changeManager.Track(ctx, k, entry, false) k.SetStorage(ctx, entry) } -func (k Keeper) LegacySetStorageAndNotify(ctx sdk.Context, entry types.StorageEntry) { +func (k Keeper) LegacySetStorageAndNotify(ctx sdk.Context, entry agoric.KVEntry) { k.changeManager.Track(ctx, k, entry, true) k.SetStorage(ctx, entry) } @@ -308,7 +308,7 @@ func (k Keeper) AppendStorageValueAndNotify(ctx sdk.Context, path, value string) if err != nil { return err } - k.SetStorageAndNotify(ctx, types.NewStorageEntry(path, string(bz))) + k.SetStorageAndNotify(ctx, agoric.NewKVEntry(path, string(bz))) return nil } @@ -320,12 +320,12 @@ func componentsToPath(components []string) string { // // Maintains the invariant: path entries exist if and only if self or some // descendant has non-empty storage -func (k Keeper) SetStorage(ctx sdk.Context, entry types.StorageEntry) { +func (k Keeper) SetStorage(ctx sdk.Context, entry agoric.KVEntry) { store := ctx.KVStore(k.storeKey) - path := entry.Path() + path := entry.Key() encodedKey := types.PathToEncodedKey(path) - if !entry.HasData() { + if !entry.HasValue() { if !k.HasChildren(ctx, path) { // We have no children, can delete. store.Delete(encodedKey) @@ -340,7 +340,7 @@ func (k Keeper) SetStorage(ctx sdk.Context, entry types.StorageEntry) { // Update our other parent children. 
pathComponents := strings.Split(path, types.PathSeparator) - if !entry.HasData() { + if !entry.HasValue() { // delete placeholder ancestors if they're no longer needed for i := len(pathComponents) - 1; i >= 0; i-- { ancestor := componentsToPath(pathComponents[0:i]) @@ -381,7 +381,7 @@ func (k Keeper) GetNoDataValue() []byte { func (k Keeper) getIntValue(ctx sdk.Context, path string) (sdk.Int, error) { indexEntry := k.GetEntry(ctx, path) - if !indexEntry.HasData() { + if !indexEntry.HasValue() { return sdk.NewInt(0), nil } @@ -420,10 +420,10 @@ func (k Keeper) PushQueueItem(ctx sdk.Context, queuePath string, value string) e // Set the vstorage corresponding to the queue entry for the current tail. path := queuePath + "." + tail.String() - k.SetStorage(ctx, types.NewStorageEntry(path, value)) + k.SetStorage(ctx, agoric.NewKVEntry(path, value)) // Update the tail to point to the next available entry. path = queuePath + ".tail" - k.SetStorage(ctx, types.NewStorageEntry(path, nextTail.String())) + k.SetStorage(ctx, agoric.NewKVEntry(path, nextTail.String())) return nil } diff --git a/golang/cosmos/x/vstorage/keeper/keeper_test.go b/golang/cosmos/x/vstorage/keeper/keeper_test.go index 1a2ce5d58ee..120e707b64b 100644 --- a/golang/cosmos/x/vstorage/keeper/keeper_test.go +++ b/golang/cosmos/x/vstorage/keeper/keeper_test.go @@ -4,6 +4,7 @@ import ( "reflect" "testing" + agoric "github.com/Agoric/agoric-sdk/golang/cosmos/types" "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types" "github.com/cosmos/cosmos-sdk/store" @@ -57,19 +58,19 @@ func TestStorage(t *testing.T) { ctx, keeper := testKit.ctx, testKit.vstorageKeeper // Test that we can store and retrieve a value. 
- keeper.SetStorage(ctx, types.NewStorageEntry("inited", "initValue")) + keeper.SetStorage(ctx, agoric.NewKVEntry("inited", "initValue")) if got := keeper.GetEntry(ctx, "inited").StringValue(); got != "initValue" { t.Errorf("got %q, want %q", got, "initValue") } // Test that unknown children return empty string. - if got := keeper.GetEntry(ctx, "unknown"); got.HasData() || got.StringValue() != "" { + if got := keeper.GetEntry(ctx, "unknown"); got.HasValue() || got.StringValue() != "" { t.Errorf("got %q, want no value", got.StringValue()) } // Test that we can store and retrieve an empty string value. - keeper.SetStorage(ctx, types.NewStorageEntry("inited", "")) - if got := keeper.GetEntry(ctx, "inited"); !got.HasData() || got.StringValue() != "" { + keeper.SetStorage(ctx, agoric.NewKVEntry("inited", "")) + if got := keeper.GetEntry(ctx, "inited"); !got.HasValue() || got.StringValue() != "" { t.Errorf("got %q, want empty string", got.StringValue()) } @@ -78,18 +79,18 @@ func TestStorage(t *testing.T) { t.Errorf("got %q children, want [inited]", got.Children) } - keeper.SetStorage(ctx, types.NewStorageEntry("key1", "value1")) + keeper.SetStorage(ctx, agoric.NewKVEntry("key1", "value1")) if got := keeper.GetChildren(ctx, ""); !childrenEqual(got.Children, []string{"inited", "key1"}) { t.Errorf("got %q children, want [inited,key1]", got.Children) } // Check alphabetical. 
- keeper.SetStorage(ctx, types.NewStorageEntry("alpha2", "value2")) + keeper.SetStorage(ctx, agoric.NewKVEntry("alpha2", "value2")) if got := keeper.GetChildren(ctx, ""); !childrenEqual(got.Children, []string{"alpha2", "inited", "key1"}) { t.Errorf("got %q children, want [alpha2,inited,key1]", got.Children) } - keeper.SetStorage(ctx, types.NewStorageEntry("beta3", "value3")) + keeper.SetStorage(ctx, agoric.NewKVEntry("beta3", "value3")) if got := keeper.GetChildren(ctx, ""); !childrenEqual(got.Children, []string{"alpha2", "beta3", "inited", "key1"}) { t.Errorf("got %q children, want [alpha2,beta3,inited,key1]", got.Children) } @@ -99,7 +100,7 @@ func TestStorage(t *testing.T) { } // Check adding children. - keeper.SetStorage(ctx, types.NewStorageEntry("key1.child1", "value1child")) + keeper.SetStorage(ctx, agoric.NewKVEntry("key1.child1", "value1child")) if got := keeper.GetEntry(ctx, "key1.child1").StringValue(); got != "value1child" { t.Errorf("got %q, want %q", got, "value1child") } @@ -109,7 +110,7 @@ func TestStorage(t *testing.T) { } // Add a grandchild. - keeper.SetStorage(ctx, types.NewStorageEntry("key1.child1.grandchild1", "value1grandchild")) + keeper.SetStorage(ctx, agoric.NewKVEntry("key1.child1.grandchild1", "value1grandchild")) if got := keeper.GetEntry(ctx, "key1.child1.grandchild1").StringValue(); got != "value1grandchild" { t.Errorf("got %q, want %q", got, "value1grandchild") } @@ -119,7 +120,7 @@ func TestStorage(t *testing.T) { } // Delete the child's contents. - keeper.SetStorage(ctx, types.NewStorageEntryWithNoData("key1.child1")) + keeper.SetStorage(ctx, agoric.NewKVEntryWithNoValue("key1.child1")) if got := keeper.GetChildren(ctx, "key1"); !childrenEqual(got.Children, []string{"child1"}) { t.Errorf("got %q children, want [child1]", got.Children) } @@ -129,7 +130,7 @@ func TestStorage(t *testing.T) { } // Delete the grandchild's contents. 
- keeper.SetStorage(ctx, types.NewStorageEntryWithNoData("key1.child1.grandchild1")) + keeper.SetStorage(ctx, agoric.NewKVEntryWithNoValue("key1.child1.grandchild1")) if got := keeper.GetChildren(ctx, "key1.child1"); !childrenEqual(got.Children, []string{}) { t.Errorf("got %q children, want []", got.Children) } @@ -139,13 +140,13 @@ func TestStorage(t *testing.T) { } // See about deleting the parent. - keeper.SetStorage(ctx, types.NewStorageEntryWithNoData("key1")) + keeper.SetStorage(ctx, agoric.NewKVEntryWithNoValue("key1")) if got := keeper.GetChildren(ctx, ""); !childrenEqual(got.Children, []string{"alpha2", "beta3", "inited"}) { t.Errorf("got %q children, want [alpha2,beta3,inited]", got.Children) } // Do a deep set. - keeper.SetStorage(ctx, types.NewStorageEntry("key2.child2.grandchild2", "value2grandchild")) + keeper.SetStorage(ctx, agoric.NewKVEntry("key2.child2.grandchild2", "value2grandchild")) if got := keeper.GetChildren(ctx, ""); !childrenEqual(got.Children, []string{"alpha2", "beta3", "inited", "key2"}) { t.Errorf("got %q children, want [alpha2,beta3,inited,key2]", got.Children) } @@ -157,7 +158,7 @@ func TestStorage(t *testing.T) { } // Do another deep set. 
- keeper.SetStorage(ctx, types.NewStorageEntry("key2.child2.grandchild2a", "value2grandchilda")) + keeper.SetStorage(ctx, agoric.NewKVEntry("key2.child2.grandchild2a", "value2grandchilda")) if got := keeper.GetChildren(ctx, "key2.child2"); !childrenEqual(got.Children, []string{"grandchild2", "grandchild2a"}) { t.Errorf("got %q children, want [grandchild2,grandchild2a]", got.Children) } @@ -191,12 +192,12 @@ func TestStorageNotify(t *testing.T) { tk := makeTestKit() ctx, keeper := tk.ctx, tk.vstorageKeeper - keeper.SetStorageAndNotify(ctx, types.NewStorageEntry("notify.noLegacy", "noLegacyValue")) - keeper.LegacySetStorageAndNotify(ctx, types.NewStorageEntry("notify.legacy", "legacyValue")) - keeper.SetStorageAndNotify(ctx, types.NewStorageEntry("notify.noLegacy2", "noLegacyValue2")) - keeper.SetStorageAndNotify(ctx, types.NewStorageEntry("notify.legacy2", "legacyValue2")) - keeper.LegacySetStorageAndNotify(ctx, types.NewStorageEntry("notify.legacy2", "legacyValue2b")) - keeper.SetStorageAndNotify(ctx, types.NewStorageEntry("notify.noLegacy2", "noLegacyValue2b")) + keeper.SetStorageAndNotify(ctx, agoric.NewKVEntry("notify.noLegacy", "noLegacyValue")) + keeper.LegacySetStorageAndNotify(ctx, agoric.NewKVEntry("notify.legacy", "legacyValue")) + keeper.SetStorageAndNotify(ctx, agoric.NewKVEntry("notify.noLegacy2", "noLegacyValue2")) + keeper.SetStorageAndNotify(ctx, agoric.NewKVEntry("notify.legacy2", "legacyValue2")) + keeper.LegacySetStorageAndNotify(ctx, agoric.NewKVEntry("notify.legacy2", "legacyValue2b")) + keeper.SetStorageAndNotify(ctx, agoric.NewKVEntry("notify.noLegacy2", "noLegacyValue2b")) // Check the batched events. 
expectedBeforeFlushEvents := sdk.Events{} diff --git a/golang/cosmos/x/vstorage/keeper/querier.go b/golang/cosmos/x/vstorage/keeper/querier.go index 698d61fac3b..44a8a8d40b4 100644 --- a/golang/cosmos/x/vstorage/keeper/querier.go +++ b/golang/cosmos/x/vstorage/keeper/querier.go @@ -35,7 +35,7 @@ func NewQuerier(keeper Keeper, legacyQuerierCdc *codec.LegacyAmino) sdk.Querier // nolint: unparam func queryData(ctx sdk.Context, path string, req abci.RequestQuery, keeper Keeper, legacyQuerierCdc *codec.LegacyAmino) (res []byte, err error) { entry := keeper.GetEntry(ctx, path) - if !entry.HasData() { + if !entry.HasValue() { return nil, sdkerrors.Wrap(sdkerrors.ErrUnknownRequest, "could not get vstorage path") } diff --git a/golang/cosmos/x/vstorage/types/types.go b/golang/cosmos/x/vstorage/types/types.go index b8cb3174c16..c65fe0948ea 100644 --- a/golang/cosmos/x/vstorage/types/types.go +++ b/golang/cosmos/x/vstorage/types/types.go @@ -1,10 +1,5 @@ package types -import ( - "encoding/json" - "fmt" -) - func NewData() *Data { return &Data{} } @@ -12,64 +7,3 @@ func NewData() *Data { func NewChildren() *Children { return &Children{} } - -type StorageEntry struct { - path string - value *string -} - -func NewStorageEntry(path string, value string) StorageEntry { - return StorageEntry{path, &value} -} - -func NewStorageEntryWithNoData(path string) StorageEntry { - return StorageEntry{path, nil} -} - -// UnmarshalStorageEntry interprets its argument as a [key: string, value?: string | null] -// JSON array and returns a corresponding StorageEntry. -// The key must be a string, and the value (if present) must be a string or null. 
-func UnmarshalStorageEntry(msg json.RawMessage) (entry StorageEntry, err error) { - var generic [2]interface{} - err = json.Unmarshal(msg, &generic) - - if err != nil { - return - } - - path, ok := generic[0].(string) - if !ok { - err = fmt.Errorf("invalid storage entry path: %q", generic[0]) - return - } - - switch generic[1].(type) { - case string: - entry = NewStorageEntry(path, generic[1].(string)) - case nil: - entry = NewStorageEntryWithNoData(path) - default: - err = fmt.Errorf("invalid storage entry value: %q", generic[1]) - } - return -} - -func (se StorageEntry) HasData() bool { - return se.value != nil -} - -func (se StorageEntry) Path() string { - return se.path -} - -func (se StorageEntry) Value() *string { - return se.value -} - -func (se StorageEntry) StringValue() string { - if se.value != nil { - return *se.value - } else { - return "" - } -} diff --git a/golang/cosmos/x/vstorage/vstorage.go b/golang/cosmos/x/vstorage/vstorage.go index b2120948a30..7885ae59199 100644 --- a/golang/cosmos/x/vstorage/vstorage.go +++ b/golang/cosmos/x/vstorage/vstorage.go @@ -7,8 +7,8 @@ import ( sdk "github.com/cosmos/cosmos-sdk/types" + agoric "github.com/Agoric/agoric-sdk/golang/cosmos/types" "github.com/Agoric/agoric-sdk/golang/cosmos/vm" - "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types" ) type vstorageHandler struct { @@ -69,8 +69,8 @@ func (sh vstorageHandler) Receive(cctx *vm.ControllerContext, str string) (ret s switch msg.Method { case "set": for _, arg := range msg.Args { - var entry types.StorageEntry - entry, err = types.UnmarshalStorageEntry(arg) + var entry agoric.KVEntry + entry, err = agoric.UnmarshalKVEntry(arg) if err != nil { return } @@ -83,8 +83,8 @@ func (sh vstorageHandler) Receive(cctx *vm.ControllerContext, str string) (ret s // FIXME: Use just "set" and remove this case. 
case "legacySet": for _, arg := range msg.Args { - var entry types.StorageEntry - entry, err = types.UnmarshalStorageEntry(arg) + var entry agoric.KVEntry + entry, err = agoric.UnmarshalKVEntry(arg) if err != nil { return } @@ -95,8 +95,8 @@ func (sh vstorageHandler) Receive(cctx *vm.ControllerContext, str string) (ret s case "setWithoutNotify": for _, arg := range msg.Args { - var entry types.StorageEntry - entry, err = types.UnmarshalStorageEntry(arg) + var entry agoric.KVEntry + entry, err = agoric.UnmarshalKVEntry(arg) if err != nil { return } @@ -106,16 +106,16 @@ func (sh vstorageHandler) Receive(cctx *vm.ControllerContext, str string) (ret s case "append": for _, arg := range msg.Args { - var entry types.StorageEntry - entry, err = types.UnmarshalStorageEntry(arg) + var entry agoric.KVEntry + entry, err = agoric.UnmarshalKVEntry(arg) if err != nil { return } - if !entry.HasData() { - err = fmt.Errorf("no value for append entry with path: %q", entry.Path()) + if !entry.HasValue() { + err = fmt.Errorf("no value for append entry with path: %q", entry.Key()) return } - err = keeper.AppendStorageValueAndNotify(cctx.Context, entry.Path(), entry.StringValue()) + err = keeper.AppendStorageValueAndNotify(cctx.Context, entry.Key(), entry.StringValue()) if err != nil { return } @@ -131,7 +131,7 @@ func (sh vstorageHandler) Receive(cctx *vm.ControllerContext, str string) (ret s } entry := keeper.GetEntry(cctx.Context, path) - if !entry.HasData() { + if !entry.HasValue() { return "null", nil } bz, err := json.Marshal(entry.StringValue()) @@ -197,7 +197,7 @@ func (sh vstorageHandler) Receive(cctx *vm.ControllerContext, str string) (ret s entries := make([][]interface{}, len(children.Children)) for i, child := range children.Children { entry := keeper.GetEntry(cctx.Context, fmt.Sprintf("%s.%s", path, child)) - if !entry.HasData() { + if !entry.HasValue() { entries[i] = []interface{}{child} } else { entries[i] = []interface{}{child, entry.Value()} diff --git 
a/golang/cosmos/x/vstorage/vstorage_test.go b/golang/cosmos/x/vstorage/vstorage_test.go index 02e478aea09..6dc1bd84576 100644 --- a/golang/cosmos/x/vstorage/vstorage_test.go +++ b/golang/cosmos/x/vstorage/vstorage_test.go @@ -70,10 +70,10 @@ func TestGetAndHas(t *testing.T) { kit := makeTestKit() keeper, handler, ctx, cctx := kit.keeper, kit.handler, kit.ctx, kit.cctx - keeper.SetStorage(ctx, types.NewStorageEntry("foo", "bar")) - keeper.SetStorage(ctx, types.NewStorageEntry("empty", "")) - keeper.SetStorage(ctx, types.NewStorageEntry("top.empty-non-terminal.leaf", "")) - keeper.SetStorage(ctx, types.NewStorageEntryWithNoData("top.empty-non-terminal")) + keeper.SetStorage(ctx, agorictypes.NewKVEntry("foo", "bar")) + keeper.SetStorage(ctx, agorictypes.NewKVEntry("empty", "")) + keeper.SetStorage(ctx, agorictypes.NewKVEntry("top.empty-non-terminal.leaf", "")) + keeper.SetStorage(ctx, agorictypes.NewKVEntryWithNoValue("top.empty-non-terminal")) type testCase struct { label string @@ -153,7 +153,7 @@ func doTestSet(t *testing.T, method string, expectNotify bool) { // TODO: Fully validate input before making changes // args: []interface{}{[]string{"foo", "X"}, []interface{}{42, "new"}}, args: []interface{}{[]interface{}{42, "new"}}, - errContains: ptr("path"), + errContains: ptr("key"), }, {label: "non-string value", // TODO: Fully validate input before making changes @@ -259,15 +259,15 @@ func TestEntries(t *testing.T) { kit := makeTestKit() keeper, handler, ctx, cctx := kit.keeper, kit.handler, kit.ctx, kit.cctx - keeper.SetStorage(ctx, types.NewStorageEntry("key1", "value1")) - keeper.SetStorage(ctx, types.NewStorageEntry("key1.child1.grandchild1", "value1grandchild")) - keeper.SetStorage(ctx, types.NewStorageEntryWithNoData("key1.child1.grandchild2")) - keeper.SetStorage(ctx, types.NewStorageEntryWithNoData("key1.child1")) - keeper.SetStorage(ctx, types.NewStorageEntry("key1.child1.empty-non-terminal.leaf", "")) - keeper.SetStorage(ctx, 
types.NewStorageEntryWithNoData("key2")) - keeper.SetStorage(ctx, types.NewStorageEntryWithNoData("key2.child2")) - keeper.SetStorage(ctx, types.NewStorageEntry("key2.child2.grandchild2", "value2grandchild")) - keeper.SetStorage(ctx, types.NewStorageEntry("key2.child2.grandchild2a", "value2grandchilda")) + keeper.SetStorage(ctx, agorictypes.NewKVEntry("key1", "value1")) + keeper.SetStorage(ctx, agorictypes.NewKVEntry("key1.child1.grandchild1", "value1grandchild")) + keeper.SetStorage(ctx, agorictypes.NewKVEntryWithNoValue("key1.child1.grandchild2")) + keeper.SetStorage(ctx, agorictypes.NewKVEntryWithNoValue("key1.child1")) + keeper.SetStorage(ctx, agorictypes.NewKVEntry("key1.child1.empty-non-terminal.leaf", "")) + keeper.SetStorage(ctx, agorictypes.NewKVEntryWithNoValue("key2")) + keeper.SetStorage(ctx, agorictypes.NewKVEntryWithNoValue("key2.child2")) + keeper.SetStorage(ctx, agorictypes.NewKVEntry("key2.child2.grandchild2", "value2grandchild")) + keeper.SetStorage(ctx, agorictypes.NewKVEntry("key2.child2.grandchild2a", "value2grandchilda")) type testCase struct { path string From 6d2fe11d144c5bbdc1611b59c84b6842e8084cb9 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Tue, 8 Aug 2023 22:30:41 +0000 Subject: [PATCH 086/109] feat(cosmos): KVEntry implements json Marshaler and Unmarshaller --- golang/cosmos/types/kv_entry.go | 95 ++++++++++---- golang/cosmos/types/kv_entry_test.go | 143 ++++++++++++++++++++++ golang/cosmos/x/vstorage/vstorage.go | 23 ++-- golang/cosmos/x/vstorage/vstorage_test.go | 2 +- 4 files changed, 225 insertions(+), 38 deletions(-) create mode 100644 golang/cosmos/types/kv_entry_test.go diff --git a/golang/cosmos/types/kv_entry.go b/golang/cosmos/types/kv_entry.go index 7be89a8b843..44448ad25b6 100644 --- a/golang/cosmos/types/kv_entry.go +++ b/golang/cosmos/types/kv_entry.go @@ -5,63 +5,110 @@ import ( "fmt" ) +var _ json.Marshaler = &KVEntry{} +var _ json.Unmarshaler = &KVEntry{} + +// KVEntry represents a string key / string value pair, 
where the value may be +// missing, which is different from an empty value. +// The semantics of a missing value are purpose-dependent rather than specified +// here, but frequently correspond with deletion/incompleteness/etc. +// A KVEntry with an empty key is considered invalid. type KVEntry struct { key string value *string } +// NewKVEntry creates a KVEntry with the provided key and value func NewKVEntry(key string, value string) KVEntry { return KVEntry{key, &value} } +// NewKVEntryWithNoValue creates a KVEntry with the provided key and no value func NewKVEntryWithNoValue(key string) KVEntry { return KVEntry{key, nil} } -// UnmarshalKVEntry interprets its argument as a [key: string, value?: string | null] -// JSON array and returns a corresponding KVEntry. -// The key must be a string, and the value (if present) must be a string or null. -func UnmarshalKVEntry(msg json.RawMessage) (entry KVEntry, err error) { - var generic [2]interface{} - err = json.Unmarshal(msg, &generic) - +// UnmarshalJSON updates a KVEntry from JSON text corresponding with a +// [key: string, value?: string | null] shape, or returns an error indicating +// invalid input. +// The key must be a non-empty string, and the value (if present) must be a +// string or null. +// +// Implements json.Unmarshaler +// Note: unlike other methods, this accepts a pointer to satisfy +// the Unmarshaler semantics. 
+func (entry *KVEntry) UnmarshalJSON(input []byte) (err error) { + var generic []*string + err = json.Unmarshal(input, &generic) if err != nil { - return + return err + } + + length := len(generic) + + if generic == nil { + return fmt.Errorf("KVEntry cannot be null") + } + if length != 1 && length != 2 { + return fmt.Errorf("KVEntry must be an array of length 1 or 2 (not %d)", length) } - key, ok := generic[0].(string) - if !ok { - err = fmt.Errorf("invalid entry key: %q", generic[0]) - return + key := generic[0] + if key == nil || *key == "" { + return fmt.Errorf("KVEntry key must be a non-empty string: %v", key) } - switch generic[1].(type) { - case string: - entry = NewKVEntry(key, generic[1].(string)) - case nil: - entry = NewKVEntryWithNoValue(key) - default: - err = fmt.Errorf("invalid entry value: %q", generic[1]) + var value *string + if length == 2 { + value = generic[1] } - return + + entry.key = *key + entry.value = value + + return nil } -func (entry KVEntry) HasValue() bool { - return entry.value != nil +// MarshalJSON encodes the KVEntry into a JSON array of [key: string, value?: string], +// with the value missing (array length of 1) if the entry has no value. +// +// Implements json.Marshaler +func (entry KVEntry) MarshalJSON() ([]byte, error) { + if !entry.IsValidKey() { + return nil, fmt.Errorf("cannot marshal invalid KVEntry") + } + if entry.value != nil { + return json.Marshal([2]string{entry.key, *entry.value}) + } else { + return json.Marshal([1]string{entry.key}) + } +} + +// IsValidKey returns whether the KVEntry has a non-empty key. +func (entry KVEntry) IsValidKey() bool { + return entry.key != "" } +// Key returns the string key. func (entry KVEntry) Key() string { return entry.key } +// HasValue returns whether the KVEntry has a value or not. +func (entry KVEntry) HasValue() bool { + return entry.value != nil +} + +// Value returns a pointer to the string value or nil if the entry has no value. 
func (entry KVEntry) Value() *string { return entry.value } +// StringValue returns the string value, or the empty string if the entry has no value. +// Note that the result therefore does not differentiate an empty string value from no value. func (entry KVEntry) StringValue() string { if entry.value != nil { return *entry.value - } else { - return "" } + return "" } diff --git a/golang/cosmos/types/kv_entry_test.go b/golang/cosmos/types/kv_entry_test.go new file mode 100644 index 00000000000..2a5c5b1e859 --- /dev/null +++ b/golang/cosmos/types/kv_entry_test.go @@ -0,0 +1,143 @@ +package types + +import ( + "encoding/json" + "errors" + "strings" + "testing" +) + +func checkEntry(t *testing.T, label string, entry KVEntry, isValidKey bool, expectedKey string, hasValue bool, expectedValue string) { + gotValidKey := entry.IsValidKey() + if gotValidKey != isValidKey { + t.Errorf("%s: valid key is %v, expected %v", label, gotValidKey, isValidKey) + } + + gotKey := entry.Key() + if gotKey != expectedKey { + t.Errorf("%s: got %q, want %q", label, gotKey, expectedKey) + } + + if entry.HasValue() { + if !hasValue { + t.Errorf("%s: expected has no value", label) + } + + gotValue := *entry.Value() + if gotValue != expectedValue { + t.Errorf("%s: got %q, want %q", label, gotValue, expectedValue) + } + } else { + if hasValue { + t.Errorf("%s: expected has value", label) + } + + gotValuePointer := entry.Value() + if gotValuePointer != nil { + t.Errorf("%s: got %#v, want nil", label, gotValuePointer) + } + } + + gotValue := entry.StringValue() + if gotValue != expectedValue { + t.Errorf("%s: got %q, want %q", label, gotValue, expectedValue) + } +} + +func TestKVEntry(t *testing.T) { + type testCase struct { + label string + entry KVEntry + isValidKey bool + expectedKey string + hasValue bool + expectedValue string + } + cases := []testCase{ + {label: "normal", entry: NewKVEntry("foo", "bar"), isValidKey: true, expectedKey: "foo", hasValue: true, expectedValue: "bar"}, + {label: 
"empty string value", entry: NewKVEntry("foo", ""), isValidKey: true, expectedKey: "foo", hasValue: true, expectedValue: ""}, + {label: "no value", entry: NewKVEntryWithNoValue("foo"), isValidKey: true, expectedKey: "foo", hasValue: false, expectedValue: ""}, + {label: "empty key", entry: NewKVEntryWithNoValue(""), isValidKey: false, expectedKey: "", hasValue: false, expectedValue: ""}, + } + for _, desc := range cases { + checkEntry(t, desc.label, desc.entry, desc.isValidKey, desc.expectedKey, desc.hasValue, desc.expectedValue) + } +} + +func TestKVEntryMarshall(t *testing.T) { + type testCase struct { + label string + entry KVEntry + expectedError error + expectedEncoding string + } + cases := []testCase{ + {label: "normal", entry: NewKVEntry("foo", "bar"), expectedEncoding: `["foo","bar"]`}, + {label: "empty string value", entry: NewKVEntry("foo", ""), expectedEncoding: `["foo",""]`}, + {label: "no value", entry: NewKVEntryWithNoValue("foo"), expectedEncoding: `["foo"]`}, + {label: "empty key", entry: NewKVEntryWithNoValue(""), expectedError: errors.New("cannot marshal invalid KVEntry")}, + } + for _, desc := range cases { + marshalled, err := json.Marshal(desc.entry) + if desc.expectedError != nil && err == nil { + t.Errorf("%s: got nil error, expected marshal error: %q", desc.label, desc.expectedError.Error()) + } else if err != nil { + if desc.expectedError == nil { + t.Errorf("%s: got error %v, expected no error", desc.label, err) + } else if !strings.Contains(err.Error(), desc.expectedError.Error()) { + t.Errorf("%s: got error %q, expected error %q", desc.label, err.Error(), desc.expectedError.Error()) + } + continue + } + if string(marshalled) != desc.expectedEncoding { + t.Errorf("%s: got %q, want %q", desc.label, string(marshalled), desc.expectedEncoding) + } + } +} + +func TestKVEntryUnmarshall(t *testing.T) { + type testCase struct { + label string + encoded string + expectedError error + expectedKey string + hasValue bool + expectedValue string + } + 
cases := []testCase{ + {label: "normal", encoded: `["foo","bar"]`, expectedKey: "foo", hasValue: true, expectedValue: "bar"}, + {label: "empty string value", encoded: `["foo",""]`, expectedKey: "foo", hasValue: true, expectedValue: ""}, + {label: "no value", encoded: `["foo"]`, expectedKey: "foo", hasValue: false, expectedValue: ""}, + {label: "null value", encoded: `["foo",null]`, expectedKey: "foo", hasValue: false, expectedValue: ""}, + {label: "null", encoded: `null`, expectedError: errors.New("KVEntry cannot be null")}, + {label: "string", encoded: `"foo"`, expectedError: errors.New("json")}, + {label: "empty array", encoded: `[]`, expectedError: errors.New("KVEntry must be an array of length 1 or 2 (not 0)")}, + {label: "[null, null] array", encoded: `[null,null]`, expectedError: errors.New("KVEntry key must be a non-empty string")}, + {label: "invalid key array", encoded: `[42]`, expectedError: errors.New("json")}, + {label: "empty key", encoded: `["",null]`, expectedError: errors.New("KVEntry key must be a non-empty string")}, + {label: "too many entries array", encoded: `["foo","bar",null]`, expectedError: errors.New("KVEntry must be an array of length 1 or 2 (not 3)")}, + {label: "invalid value array", encoded: `["foo",42]`, expectedError: errors.New("json")}, + } + for _, desc := range cases { + unmarshalled := NewKVEntry("untouched", "untouched") + err := json.Unmarshal([]byte(desc.encoded), &unmarshalled) + if desc.expectedError != nil && err == nil { + t.Errorf("%s: got nil error, expected unmarshal error: %q", desc.label, desc.expectedError.Error()) + } else if err != nil { + if unmarshalled.Key() != "untouched" { + t.Errorf("%s: expected error to not modify target key, got %s", desc.label, unmarshalled.Key()) + } + if unmarshalled.StringValue() != "untouched" { + t.Errorf("%s: expected error to not modify target value, got %v", desc.label, unmarshalled.Value()) + } + if desc.expectedError == nil { + t.Errorf("%s: got error %v, expected no error", 
desc.label, err) + } else if !strings.Contains(err.Error(), desc.expectedError.Error()) { + t.Errorf("%s: got error %q, expected error %q", desc.label, err.Error(), desc.expectedError.Error()) + } + continue + } + + checkEntry(t, desc.label, unmarshalled, true, desc.expectedKey, desc.hasValue, desc.expectedValue) + } +} diff --git a/golang/cosmos/x/vstorage/vstorage.go b/golang/cosmos/x/vstorage/vstorage.go index 7885ae59199..3df0da359de 100644 --- a/golang/cosmos/x/vstorage/vstorage.go +++ b/golang/cosmos/x/vstorage/vstorage.go @@ -70,7 +70,7 @@ func (sh vstorageHandler) Receive(cctx *vm.ControllerContext, str string) (ret s case "set": for _, arg := range msg.Args { var entry agoric.KVEntry - entry, err = agoric.UnmarshalKVEntry(arg) + err = json.Unmarshal(arg, &entry) if err != nil { return } @@ -84,7 +84,7 @@ func (sh vstorageHandler) Receive(cctx *vm.ControllerContext, str string) (ret s case "legacySet": for _, arg := range msg.Args { var entry agoric.KVEntry - entry, err = agoric.UnmarshalKVEntry(arg) + err = json.Unmarshal(arg, &entry) if err != nil { return } @@ -96,7 +96,7 @@ func (sh vstorageHandler) Receive(cctx *vm.ControllerContext, str string) (ret s case "setWithoutNotify": for _, arg := range msg.Args { var entry agoric.KVEntry - entry, err = agoric.UnmarshalKVEntry(arg) + err = json.Unmarshal(arg, &entry) if err != nil { return } @@ -107,7 +107,7 @@ func (sh vstorageHandler) Receive(cctx *vm.ControllerContext, str string) (ret s case "append": for _, arg := range msg.Args { var entry agoric.KVEntry - entry, err = agoric.UnmarshalKVEntry(arg) + err = json.Unmarshal(arg, &entry) if err != nil { return } @@ -131,10 +131,7 @@ func (sh vstorageHandler) Receive(cctx *vm.ControllerContext, str string) (ret s } entry := keeper.GetEntry(cctx.Context, path) - if !entry.HasValue() { - return "null", nil - } - bz, err := json.Marshal(entry.StringValue()) + bz, err := json.Marshal(entry.Value()) if err != nil { return "", err } @@ -194,13 +191,13 @@ func (sh 
vstorageHandler) Receive(cctx *vm.ControllerContext, str string) (ret s return } children := keeper.GetChildren(cctx.Context, path) - entries := make([][]interface{}, len(children.Children)) + entries := make([]agoric.KVEntry, len(children.Children)) for i, child := range children.Children { entry := keeper.GetEntry(cctx.Context, fmt.Sprintf("%s.%s", path, child)) if !entry.HasValue() { - entries[i] = []interface{}{child} + entries[i] = agoric.NewKVEntryWithNoValue(child) } else { - entries[i] = []interface{}{child, entry.Value()} + entries[i] = agoric.NewKVEntry(child, entry.StringValue()) } } bytes, err := json.Marshal(entries) @@ -216,9 +213,9 @@ func (sh vstorageHandler) Receive(cctx *vm.ControllerContext, str string) (ret s return } children := keeper.GetChildren(cctx.Context, path) - vals := make([]string, len(children.Children)) + vals := make([]*string, len(children.Children)) for i, child := range children.Children { - vals[i] = keeper.GetEntry(cctx.Context, fmt.Sprintf("%s.%s", path, child)).StringValue() + vals[i] = keeper.GetEntry(cctx.Context, fmt.Sprintf("%s.%s", path, child)).Value() } bytes, err := json.Marshal(vals) if err != nil { diff --git a/golang/cosmos/x/vstorage/vstorage_test.go b/golang/cosmos/x/vstorage/vstorage_test.go index 6dc1bd84576..5817e1ade25 100644 --- a/golang/cosmos/x/vstorage/vstorage_test.go +++ b/golang/cosmos/x/vstorage/vstorage_test.go @@ -153,7 +153,7 @@ func doTestSet(t *testing.T, method string, expectNotify bool) { // TODO: Fully validate input before making changes // args: []interface{}{[]string{"foo", "X"}, []interface{}{42, "new"}}, args: []interface{}{[]interface{}{42, "new"}}, - errContains: ptr("key"), + errContains: ptr("json"), }, {label: "non-string value", // TODO: Fully validate input before making changes From 393b91baaa25c61364955102e8cfcdcaec90870d Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Sat, 22 Jul 2023 18:19:07 +0000 Subject: [PATCH 087/109] feat(cosmic-swingset): replace import/export 
options --- .../swingset/keeper/extension_snapshotter.go | 6 +- .../keeper/swing_store_exports_handler.go | 80 ++++++---- .../cosmic-swingset/src/export-kernel-db.js | 133 +++++++++++------ .../cosmic-swingset/src/import-kernel-db.js | 138 +++++++++++++----- 4 files changed, 244 insertions(+), 113 deletions(-) diff --git a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go index 8e4c1fc0f2e..5991c52fd20 100644 --- a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go @@ -125,8 +125,8 @@ func (snapshotter *ExtensionSnapshotter) InitiateSnapshot(height int64) error { blockHeight := uint64(height) return snapshotter.swingStoreExportsHandler.InitiateExport(blockHeight, snapshotter, SwingStoreExportOptions{ - ExportMode: SwingStoreExportModeCurrent, - IncludeExportData: false, + ArtifactMode: SwingStoreArtifactModeOperational, + ExportDataMode: SwingStoreExportDataModeSkip, }) } @@ -304,6 +304,6 @@ func (snapshotter *ExtensionSnapshotter) RestoreExtension(blockHeight uint64, fo return snapshotter.swingStoreExportsHandler.RestoreExport( SwingStoreExportProvider{BlockHeight: blockHeight, GetExportDataReader: getExportDataReader, ReadNextArtifact: readNextArtifact}, - SwingStoreRestoreOptions{IncludeHistorical: false}, + SwingStoreRestoreOptions{ArtifactMode: SwingStoreArtifactModeOperational}, ) } diff --git a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go index a0c34268102..84ee6dc1acb 100644 --- a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go +++ b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go @@ -72,7 +72,7 @@ import ( // - OnExportRetrieved reads the export using the provider. // // Restoring a swing-store export does not have similar non-blocking requirements. 
-// The component simply invokes swingStoreExportHandler.RestoreExport with a +// The component simply invokes swingStoreExportsHandler.RestoreExport with a // SwingStoreExportProvider representing the swing-store export to // be restored, and RestoreExport will consume it and block until the JS side // has completed the restore before returning. @@ -157,44 +157,71 @@ type swingStoreRestoreExportAction struct { Args [1]swingStoreImportOptions `json:"args"` } -// SwingStoreExportModeCurrent represents the minimal set of artifacts needed -// to operate a node. -const SwingStoreExportModeCurrent = "current" +const ( + // SwingStoreArtifactModeNone means that no artifacts are part of the + // export / import. + SwingStoreArtifactModeNone = "none" -// SwingStoreExportModeArchival represents the set of all artifacts needed to -// not lose any historical state. -const SwingStoreExportModeArchival = "archival" + // SwingStoreArtifactModeOperational represents the minimal set of artifacts + // needed to operate a node. + SwingStoreArtifactModeOperational = "operational" -// SwingStoreExportModeDebug represents the maximal set of artifacts available -// in the JS swing-store, including any kept around for debugging purposed only -// (like previous XS heap snapshots) -const SwingStoreExportModeDebug = "debug" + // SwingStoreArtifactModeReplay represents the set of artifacts needed to + // replay the current incarnation of every vat. + SwingStoreArtifactModeReplay = "replay" + + // SwingStoreArtifactModeArchival represents the set of all artifacts + // providing all available historical state. 
+ SwingStoreArtifactModeArchival = "archival" + + // SwingStoreArtifactModeDebug represents the maximal set of artifacts + // available in the JS swing-store, including any kept around for debugging + // purposes only (like previous XS heap snapshots) + SwingStoreArtifactModeDebug = "debug" +) + +const ( + // SwingStoreExportDataModeSkip indicates "export data" should be excluded from + // an export. ArtifactMode cannot be "none" in this case. + SwingStoreExportDataModeSkip = "skip" + + // SwingStoreExportDataModeAll indicates "export data" should be part of the + // export or import. For import, ArtifactMode cannot be "none". + SwingStoreExportDataModeAll = "all" +) // SwingStoreExportOptions are configurable options provided to the JS swing-store export type SwingStoreExportOptions struct { - // The export mode can be "current", "archival" or "debug" (SwingStoreExportMode* const) - // See packages/cosmic-swingset/src/export-kernel-db.js initiateSwingStoreExport and - // packages/swing-store/src/swingStore.js makeSwingStoreExporter - ExportMode string `json:"exportMode,omitempty"` - // A flag indicating whether "export data" should be part of the swing-store export - // If false, the resulting SwingStoreExportProvider's GetExportDataReader - // will return nil - IncludeExportData bool `json:"includeExportData,omitempty"` + // ArtifactMode controls the set of artifacts that should be included in the + // swing-store export. Any SwingStoreArtifactMode* const value can be used + // (None, Operational, Replay, Archival, Debug). + // See packages/cosmic-swingset/src/export-kernel-db.js initiateSwingStoreExport + ArtifactMode string `json:"artifactMode,omitempty"` + // ExportDataMode selects whether to include "export data" in the swing-store + // export or not. Use the value SwingStoreExportDataModeSkip or + // SwingStoreExportDataModeAll. If "skip", the reader returned by + // SwingStoreExportProvider's GetExportDataReader will be nil. 
+ ExportDataMode string `json:"exportDataMode,omitempty"` } // SwingStoreRestoreOptions are configurable options provided to the JS swing-store import type SwingStoreRestoreOptions struct { - // A flag indicating whether the swing-store import should attempt to load - // all historical artifacts available from the export provider - IncludeHistorical bool `json:"includeHistorical,omitempty"` + // ArtifactMode controls the set of artifacts that should be restored in + // swing-store. Any SwingStoreArtifactMode* const value can be used + // (None, Operational, Replay, Archival, Debug). + // See packages/cosmic-swingset/src/import-kernel-db.js performStateSyncImport + ArtifactMode string `json:"artifactMode,omitempty"` } type swingStoreImportOptions struct { // ExportDir is the directory created by RestoreExport that JS swing-store // should import from. ExportDir string `json:"exportDir"` - // IncludeHistorical is a copy of SwingStoreRestoreOptions.IncludeHistorical - IncludeHistorical bool `json:"includeHistorical,omitempty"` + // ArtifactMode is a copy of SwingStoreRestoreOptions.ArtifactMode + ArtifactMode string `json:"artifactMode,omitempty"` + // ExportDataMode must currently be "all" for import, since "export data" is + // needed to restore a swing-store. 
+ ExportDataMode string `json:"exportDataMode,omitempty"` } var disallowedArtifactNameChar = regexp.MustCompile(`[^-_.a-zA-Z0-9]`) @@ -781,8 +808,9 @@ func (exportsHandler SwingStoreExportsHandler) RestoreExport(provider SwingStore BlockHeight: blockHeight, Request: restoreRequest, Args: [1]swingStoreImportOptions{{ - ExportDir: exportDir, - IncludeHistorical: restoreOptions.IncludeHistorical, + ExportDir: exportDir, + ArtifactMode: restoreOptions.ArtifactMode, + ExportDataMode: SwingStoreExportDataModeAll, }}, } diff --git a/packages/cosmic-swingset/src/export-kernel-db.js b/packages/cosmic-swingset/src/export-kernel-db.js index 3da83614a08..f6a89ece00b 100755 --- a/packages/cosmic-swingset/src/export-kernel-db.js +++ b/packages/cosmic-swingset/src/export-kernel-db.js @@ -23,30 +23,55 @@ import { makeProcessValue } from './helpers/process-value.js'; // with the golang SwingStoreExportsHandler in golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go export const ExportManifestFileName = 'export-manifest.json'; -/** @typedef {'current' | 'archival' | 'debug'} SwingStoreExportMode */ +/** + * @typedef {'none' // No artifacts included + * | import("@agoric/swing-store").ArtifactMode + * } SwingStoreArtifactMode + */ + +/** + * @typedef {'skip' // Do not include any "export data" (artifacts only) + * | 'all' // Include all export data, create new swing-store on import + * } SwingStoreExportDataMode + */ /** - * @param {SwingStoreExportMode | undefined} exportMode + * @param {SwingStoreArtifactMode | undefined} artifactMode * @returns {import("@agoric/swing-store").ArtifactMode} */ -const getArtifactModeFromExportMode = exportMode => { - switch (exportMode) { - case 'current': +export const getEffectiveArtifactMode = artifactMode => { + switch (artifactMode) { case undefined: + case 'none': + case 'operational': return 'operational'; + case 'replay': case 'archival': - return 'archival'; case 'debug': - return 'debug'; + return artifactMode; default: - throw 
Fail`Invalid value ${q(exportMode)} for "export-mode"`; + throw Fail`Invalid value ${q(artifactMode)} for "artifact-mode"`; } }; +/** @type {(artifactMode: string | undefined) => asserts artifactMode is SwingStoreArtifactMode | undefined} */ +export const checkArtifactMode = getEffectiveArtifactMode; + /** - * @type {(exportMode: string | undefined) => asserts exportMode is SwingStoreExportMode} + * @param {string | undefined} mode + * @returns {asserts mode is SwingStoreExportDataMode | undefined} */ -const checkExportMode = getArtifactModeFromExportMode; +export const checkExportDataMode = mode => { + switch (mode) { + case 'skip': + case undefined: + break; + case 'all': + break; + default: + throw Fail`Invalid value ${q(mode)} for "export-data-mode"`; + } +}; /** * A state-sync manifest is a representation of the information contained in a @@ -60,7 +85,7 @@ const checkExportMode = getArtifactModeFromExportMode; * * @typedef {object} StateSyncManifest * @property {number} blockHeight the block height corresponding to this export - * @property {SwingStoreExportMode} [mode] + * @property {SwingStoreArtifactMode} [artifactMode] * @property {string} [data] file name containing the swingStore "export data" * @property {Array<[artifactName: string, fileName: string]>} artifacts * List of swingStore export artifacts which can be validated by the export data @@ -79,8 +104,8 @@ const checkExportMode = getArtifactModeFromExportMode; * @property {string} stateDir the directory containing the SwingStore to export * @property {string} exportDir the directory in which to place the exported artifacts and manifest * @property {number} [blockHeight] block height to check for - * @property {SwingStoreExportMode} [exportMode] whether to include historical or debug artifacts in the export - * @property {boolean} [includeExportData] whether to include an artifact for the export data in the export + * @property {SwingStoreArtifactMode} [artifactMode] the level of artifacts to include 
in the export + * @property {SwingStoreExportDataMode} [exportDataMode] include a synthetic artifact for the export data in the export */ /** @@ -96,10 +121,12 @@ export const validateExporterOptions = options => { options.blockHeight == null || typeof options.blockHeight === 'number' || Fail`optional blockHeight option not a number`; - checkExportMode(options.exportMode); - options.includeExportData == null || - typeof options.includeExportData === 'boolean' || - Fail`optional includeExportData option not a boolean`; + checkArtifactMode(options.artifactMode); + checkExportDataMode(options.exportDataMode); + + options.includeExportData === undefined || + Fail`deprecated includeExportData option found`; + options.exportMode === undefined || Fail`deprecated exportMode option found`; }; /** @@ -113,7 +140,7 @@ export const validateExporterOptions = options => { * @returns {StateSyncExporter} */ export const initiateSwingStoreExport = ( - { stateDir, exportDir, blockHeight, exportMode, includeExportData }, + { stateDir, exportDir, blockHeight, artifactMode, exportDataMode }, { fs: { open, writeFile }, pathResolve, @@ -122,8 +149,7 @@ export const initiateSwingStoreExport = ( log = console.log, }, ) => { - const artifactMode = getArtifactModeFromExportMode(exportMode); - + const effectiveArtifactMode = getEffectiveArtifactMode(artifactMode); /** @type {number | undefined} */ let savedBlockHeight; @@ -143,7 +169,9 @@ export const initiateSwingStoreExport = ( const manifestFile = await open(manifestPath, 'wx'); cleanup.push(async () => manifestFile.close()); - const swingStoreExporter = makeExporter(stateDir, { artifactMode }); + const swingStoreExporter = makeExporter(stateDir, { + artifactMode: effectiveArtifactMode, + }); cleanup.push(async () => swingStoreExporter.close()); const { hostStorage } = openDB(stateDir); @@ -153,7 +181,9 @@ export const initiateSwingStoreExport = ( if (blockHeight) { blockHeight === savedBlockHeight || - Fail`DB at unexpected block height 
${savedBlockHeight} (expected ${blockHeight})`; + Fail`DB at unexpected block height ${q(savedBlockHeight)} (expected ${q( + blockHeight, + )})`; } abortIfStopped(); @@ -163,11 +193,11 @@ export const initiateSwingStoreExport = ( /** @type {StateSyncManifest} */ const manifest = { blockHeight: savedBlockHeight, - mode: exportMode, + artifactMode: artifactMode || effectiveArtifactMode, artifacts: [], }; - if (includeExportData) { + if (exportDataMode === 'all') { log?.(`Writing Export Data`); const fileName = `export-data.jsonl`; // eslint-disable-next-line @jessie.js/no-nested-await @@ -183,14 +213,16 @@ export const initiateSwingStoreExport = ( } abortIfStopped(); - for await (const artifactName of swingStoreExporter.getArtifactNames()) { - abortIfStopped(); - log?.(`Writing artifact: ${artifactName}`); - const artifactData = swingStoreExporter.getArtifact(artifactName); - // Use artifactName as the file name as we trust swingStore to generate - // artifact names that are valid file names. - await writeFile(pathResolve(exportDir, artifactName), artifactData); - manifest.artifacts.push([artifactName, artifactName]); + if (artifactMode !== 'none') { + for await (const artifactName of swingStoreExporter.getArtifactNames()) { + abortIfStopped(); + log?.(`Writing artifact: ${artifactName}`); + const artifactData = swingStoreExporter.getArtifact(artifactName); + // Use artifactName as the file name as we trust swingStore to generate + // artifact names that are valid file names. 
+ await writeFile(pathResolve(exportDir, artifactName), artifactData); + manifest.artifacts.push([artifactName, artifactName]); + } } await manifestFile.write(JSON.stringify(manifest, null, 2)); @@ -274,11 +306,22 @@ export const main = async ( /** @type {string} */ (processValue.getFlag('export-dir', '.')), ); - const includeExportData = processValue.getBoolean({ - flagName: 'include-export-data', - }); - const exportMode = processValue.getFlag('export-mode'); - checkExportMode(exportMode); + const artifactMode = /** @type {SwingStoreArtifactMode | undefined} */ ( + processValue.getFlag('artifact-mode') + ); + checkArtifactMode(artifactMode); + + const exportDataMode = processValue.getFlag('export-data-mode'); + checkExportDataMode(exportDataMode); + + if ( + processValue.getBoolean({ flagName: 'include-export-data' }) !== undefined + ) { + throw Fail`deprecated "include-export-data" options, use "export-data-mode" instead`; + } + if (processValue.getFlag('export-mode') !== undefined) { + throw Fail`deprecated "export-mode" options, use "artifact-mode" instead`; + } const checkBlockHeight = processValue.getInteger({ flagName: 'check-block-height', @@ -295,8 +338,8 @@ export const main = async ( stateDir, exportDir, blockHeight: checkBlockHeight, - exportMode, - includeExportData, + artifactMode, + exportDataMode, }, { fs, @@ -337,7 +380,7 @@ export const main = async ( * @returns {StateSyncExporter} */ export const spawnSwingStoreExport = ( - { stateDir, exportDir, blockHeight, exportMode, includeExportData }, + { stateDir, exportDir, blockHeight, artifactMode, exportDataMode }, { fork, verbose }, ) => { const args = ['--state-dir', stateDir, '--export-dir', exportDir]; @@ -346,12 +389,12 @@ export const spawnSwingStoreExport = ( args.push('--check-block-height', String(blockHeight)); } - if (exportMode) { - args.push('--export-mode', exportMode); + if (artifactMode) { + args.push('--artifact-mode', artifactMode); } - if (includeExportData) { - 
args.push('--include-export-data'); + if (exportDataMode) { + args.push('--export-data-mode', exportDataMode); } if (verbose) { @@ -403,7 +446,7 @@ export const spawnSwingStoreExport = ( } default: { // @ts-expect-error exhaustive check - Fail`Unexpected ${msg.type} message`; + Fail`Unexpected ${q(msg.type)} message`; } } }; diff --git a/packages/cosmic-swingset/src/import-kernel-db.js b/packages/cosmic-swingset/src/import-kernel-db.js index bed9d42cfb8..9f347041b8b 100755 --- a/packages/cosmic-swingset/src/import-kernel-db.js +++ b/packages/cosmic-swingset/src/import-kernel-db.js @@ -12,19 +12,24 @@ import fsPromisesPower from 'fs/promises'; import pathPower from 'path'; import BufferLineTransform from '@agoric/internal/src/node/buffer-line-transform.js'; -import { Fail } from '@agoric/assert'; +import { Fail, q } from '@agoric/assert'; import { importSwingStore } from '@agoric/swing-store'; import { isEntrypoint } from './helpers/is-entrypoint.js'; import { makeProcessValue } from './helpers/process-value.js'; -import { ExportManifestFileName } from './export-kernel-db.js'; +import { + ExportManifestFileName, + checkExportDataMode, + checkArtifactMode, +} from './export-kernel-db.js'; /** * @typedef {object} StateSyncImporterOptions * @property {string} stateDir the directory containing the SwingStore to export * @property {string} exportDir the directory where to place the exported artifacts and manifest * @property {number} [blockHeight] block height to check for - * @property {boolean} [includeHistorical] whether to include historical artifacts in the export + * @property {import('./export-kernel-db.js').SwingStoreExportDataMode} [exportDataMode] how to handle export data + * @property {import('./export-kernel-db.js').SwingStoreArtifactMode} [artifactMode] the level of historical artifacts to import */ /** @@ -40,9 +45,57 @@ export const validateImporterOptions = options => { options.blockHeight == null || typeof options.blockHeight === 'number' || 
Fail`optional blockHeight option not a number`; - options.includeHistorical == null || - typeof options.includeHistorical === 'boolean' || - Fail`optional includeHistorical option not a boolean`; + checkExportDataMode(options.exportDataMode); + checkArtifactMode(options.artifactMode); + options.includeHistorical === undefined || + Fail`deprecated includeHistorical option found`; +}; + +/** + * @param {Pick} options + * @param {Readonly} manifest + * @returns {import('@agoric/swing-store').ImportSwingStoreOptions} + */ +const checkAndGetImportSwingStoreOptions = (options, manifest) => { + typeof manifest.blockHeight === 'number' || + Fail`Cannot restore snapshot without block height`; + + manifest.data || Fail`State-sync manifest missing export data`; + + const { artifactMode = manifest.artifactMode || 'debug' } = options; + + if (artifactMode === 'none') { + throw Fail`Cannot import "export data" without at least "operational" artifacts`; + } + + manifest.artifacts?.length || + Fail`State-sync manifest missing required artifacts`; + + switch (artifactMode) { + case 'debug': + // eslint-disable-next-line no-fallthrough + case 'operational': + if (manifest.artifactMode === 'operational') break; + // eslint-disable-next-line no-fallthrough + case 'replay': + if (manifest.artifactMode === 'replay') break; + // eslint-disable-next-line no-fallthrough + case 'archival': + if (manifest.artifactMode === 'archival') break; + if ( + manifest.artifactMode === undefined || + manifest.artifactMode === 'debug' + ) { + // assume the export has sufficient data + break; + } + throw Fail`State-sync manifest has insufficient artifacts: requested import artifact mode: ${q( + artifactMode, + )}, manifest has ${q(manifest.artifactMode)} artifacts`; + default: + throw Fail`Unexpected artifactMode ${q(artifactMode)}`; + } + return { artifactMode }; }; /** @@ -55,7 +108,7 @@ export const validateImporterOptions = options => { * @returns {Promise} */ export const performStateSyncImport = 
async ( - { stateDir, exportDir, blockHeight, includeHistorical }, + { stateDir, exportDir, blockHeight, exportDataMode = 'all', artifactMode }, { fs: { createReadStream, readFile }, pathResolve, @@ -67,7 +120,9 @@ export const performStateSyncImport = async ( const safeExportFileResolve = allegedRelativeFilename => { const resolvedPath = pathResolve(exportDir, allegedRelativeFilename); resolvedPath.startsWith(exportDir) || - Fail`Exported file ${allegedRelativeFilename} must be in export dir ${exportDir}`; + Fail`Exported file ${q( + allegedRelativeFilename, + )} must be in export dir ${q(exportDir)}`; return resolvedPath; }; @@ -78,26 +133,12 @@ export const performStateSyncImport = async ( ); if (blockHeight !== undefined && manifest.blockHeight !== blockHeight) { - Fail`State-sync manifest for unexpected block height ${manifest.blockHeight} (expected ${blockHeight})`; + Fail`State-sync manifest for unexpected block height ${q( + manifest.blockHeight, + )} (expected ${q(blockHeight)})`; } - if (!manifest.data) { - throw Fail`State-sync manifest missing export data`; - } - - if (!manifest.artifacts) { - throw Fail`State-sync manifest missing required artifacts`; - } - - const artifacts = harden(Object.fromEntries(manifest.artifacts)); - - if ( - includeHistorical && - manifest.mode !== 'archival' && - manifest.mode !== 'debug' - ) { - throw Fail`State-sync manifest missing historical artifacts`; - } + const artifacts = harden(Object.fromEntries(manifest.artifacts || [])); // Represent the data in `exportDir` as a SwingSetExporter object. 
/** @type {import('@agoric/swing-store').SwingStoreExporter} */ @@ -128,7 +169,7 @@ export const performStateSyncImport = async ( log?.(`importing artifact ${name}`); const fileName = artifacts[name]; if (!fileName) { - Fail`invalid artifact ${name}`; + Fail`invalid artifact ${q(name)}`; } const stream = createReadStream(safeExportFileResolve(fileName)); yield* stream; @@ -141,17 +182,24 @@ export const performStateSyncImport = async ( }, }); - const artifactMode = includeHistorical - ? 'debug' // for now don't enforce completeness but allow importing all provided artifacts - : 'operational'; + if (exportDataMode === 'all') { + const importOptions = checkAndGetImportSwingStoreOptions( + { artifactMode, exportDataMode }, + manifest, + ); - const swingstore = await importDB(exporter, stateDir, { artifactMode }); + const swingstore = await importDB(exporter, stateDir, importOptions); - const { hostStorage } = swingstore; + const { hostStorage } = swingstore; - hostStorage.kvStore.set('host.height', String(manifest.blockHeight)); - await hostStorage.commit(); - await hostStorage.close(); + hostStorage.kvStore.set('host.height', String(manifest.blockHeight)); + await hostStorage.commit(); + await hostStorage.close(); + } else if (exportDataMode === 'skip') { + throw Fail`Repopulation of artifacts not yet supported`; + } else { + throw Fail`Unknown export-data-mode ${exportDataMode}`; + } }; /** @@ -186,9 +234,20 @@ export const main = async ( /** @type {string} */ (processValue.getFlag('export-dir', '.')), ); - const includeHistorical = processValue.getBoolean({ - flagName: 'include-historical', - }); + const artifactMode = + /** @type {import('./export-kernel-db.js').SwingStoreArtifactMode | undefined} */ ( + processValue.getFlag('artifact-mode') + ); + checkArtifactMode(artifactMode); + + const exportDataMode = processValue.getFlag('export-data-mode'); + checkExportDataMode(exportDataMode); + + if ( + processValue.getBoolean({ flagName: 'include-historical' }) !== 
undefined + ) { + throw Fail`deprecated "include-historical" options, use "artifact-mode" instead`; + } const checkBlockHeight = processValue.getInteger({ flagName: 'check-block-height', @@ -203,7 +262,8 @@ export const main = async ( stateDir, exportDir, blockHeight: checkBlockHeight, - includeHistorical, + artifactMode, + exportDataMode, }, { fs, From 3361b25ddaa00116476d3de1107e800499ab5c21 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Mon, 10 Jul 2023 20:25:13 +0000 Subject: [PATCH 088/109] feat(cosmos): wire new swingset port handler --- golang/cosmos/app/app.go | 48 ++++++++++--------- golang/cosmos/x/swingset/swingset.go | 40 ++++++++++++++++ packages/cosmic-swingset/src/chain-main.js | 4 ++ packages/vats/src/core/basic-behaviors.js | 1 + .../vats/test/test-vat-bank-integration.js | 1 + 5 files changed, 72 insertions(+), 22 deletions(-) create mode 100644 golang/cosmos/x/swingset/swingset.go diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index 07779ef0916..b0b9c6b30cd 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -202,6 +202,7 @@ type GaiaApp struct { // nolint: golint controllerInited bool bootstrapNeeded bool lienPort int + swingsetPort int vbankPort int vibcPort int vstoragePort int @@ -459,6 +460,7 @@ func NewAgoricApp( app.VstorageKeeper, vbanktypes.ReservePoolName, callToController, ) + app.swingsetPort = vm.RegisterPortHandler("swingset", swingset.NewPortHandler(app.SwingSetKeeper)) app.SwingStoreExportsHandler = *swingsetkeeper.NewSwingStoreExportsHandler( app.Logger(), @@ -842,17 +844,18 @@ func normalizeModuleAccount(ctx sdk.Context, ak authkeeper.AccountKeeper, name s } type cosmosInitAction struct { - Type string `json:"type"` - ChainID string `json:"chainID"` - BlockTime int64 `json:"blockTime,omitempty"` - IsBootstrap bool `json:"isBootstrap"` - Params swingset.Params `json:"params"` - SupplyCoins sdk.Coins `json:"supplyCoins"` - UpgradePlan *upgradetypes.Plan `json:"upgradePlan,omitempty"` - 
LienPort int `json:"lienPort"` - StoragePort int `json:"storagePort"` - VbankPort int `json:"vbankPort"` - VibcPort int `json:"vibcPort"` + Type string `json:"type"` + ChainID string `json:"chainID"` + BlockTime int64 `json:"blockTime,omitempty"` + IsBootstrap bool `json:"isBootstrap"` + Params swingset.Params `json:"params"` + SupplyCoins sdk.Coins `json:"supplyCoins"` + UpgradePlan *upgradetypes.Plan `json:"upgradePlan,omitempty"` + LienPort int `json:"lienPort"` + StoragePort int `json:"storagePort"` + SwingsetPort int `json:"swingsetPort"` + VbankPort int `json:"vbankPort"` + VibcPort int `json:"vibcPort"` } // Name returns the name of the App @@ -882,17 +885,18 @@ func (app *GaiaApp) initController(ctx sdk.Context, bootstrap bool) { // Begin initializing the controller here. action := &cosmosInitAction{ - Type: "AG_COSMOS_INIT", - ChainID: ctx.ChainID(), - BlockTime: blockTime, - IsBootstrap: bootstrap, - Params: app.SwingSetKeeper.GetParams(ctx), - SupplyCoins: sdk.NewCoins(app.BankKeeper.GetSupply(ctx, "uist")), - UpgradePlan: app.upgradePlan, - LienPort: app.lienPort, - StoragePort: app.vstoragePort, - VbankPort: app.vbankPort, - VibcPort: app.vibcPort, + Type: "AG_COSMOS_INIT", + ChainID: ctx.ChainID(), + BlockTime: blockTime, + IsBootstrap: bootstrap, + Params: app.SwingSetKeeper.GetParams(ctx), + SupplyCoins: sdk.NewCoins(app.BankKeeper.GetSupply(ctx, "uist")), + UpgradePlan: app.upgradePlan, + LienPort: app.lienPort, + StoragePort: app.vstoragePort, + SwingsetPort: app.swingsetPort, + VbankPort: app.vbankPort, + VibcPort: app.vibcPort, } // This really abuses `BlockingSend` to get back at `sendToController` out, err := app.SwingSetKeeper.BlockingSend(ctx, action) diff --git a/golang/cosmos/x/swingset/swingset.go b/golang/cosmos/x/swingset/swingset.go new file mode 100644 index 00000000000..87af999fc5d --- /dev/null +++ b/golang/cosmos/x/swingset/swingset.go @@ -0,0 +1,40 @@ +package swingset + +import ( + "encoding/json" + "fmt" + + 
"github.com/Agoric/agoric-sdk/golang/cosmos/vm" +) + +// portHandler implements vm.PortHandler +// for processing inbound messages from Swingset. +type portHandler struct { + keeper Keeper +} + +type swingsetMessage struct { + Method string `json:"method"` + Args []json.RawMessage `json:"args"` +} + +// NewPortHandler returns a port handler for a swingset Keeper. +func NewPortHandler(k Keeper) vm.PortHandler { + return portHandler{keeper: k} +} + +// Receive implements the vm.PortHandler method. +// It receives and processes an inbound message, returning the +// JSON-serialized response or an error. +func (ph portHandler) Receive(ctx *vm.ControllerContext, str string) (string, error) { + var msg swingsetMessage + err := json.Unmarshal([]byte(str), &msg) + if err != nil { + return "", err + } + + switch msg.Method { + default: + return "", fmt.Errorf("unrecognized swingset method %s", msg.Method) + } +} diff --git a/packages/cosmic-swingset/src/chain-main.js b/packages/cosmic-swingset/src/chain-main.js index 6ab413d33c1..9ed3e4ab362 100644 --- a/packages/cosmic-swingset/src/chain-main.js +++ b/packages/cosmic-swingset/src/chain-main.js @@ -649,6 +649,10 @@ export default async function main(progname, args, { env, homedir, agcc }) { !blockingSend || Fail`Swingset already initialized`; + if (action.swingsetPort) { + portNums.swingset = action.swingsetPort; + } + if (action.vibcPort) { portNums.dibc = action.vibcPort; } diff --git a/packages/vats/src/core/basic-behaviors.js b/packages/vats/src/core/basic-behaviors.js index b79d79f2948..4ad810a9073 100644 --- a/packages/vats/src/core/basic-behaviors.js +++ b/packages/vats/src/core/basic-behaviors.js @@ -40,6 +40,7 @@ const bootMsgEx = { { denom: 'ubld', amount: '1000000000000000' }, { denom: 'uist', amount: '50000000000' }, ], + swingsetPort: 4, vbankPort: 3, vibcPort: 2, }; diff --git a/packages/vats/test/test-vat-bank-integration.js b/packages/vats/test/test-vat-bank-integration.js index ed83b39a561..1a06e34c7fd 
100644 --- a/packages/vats/test/test-vat-bank-integration.js +++ b/packages/vats/test/test-vat-bank-integration.js @@ -49,6 +49,7 @@ test('mintInitialSupply, addBankAssets bootstrap actions', async t => { chainID: 'ag', storagePort: 1, supplyCoins: [{ amount: '50000000', denom: 'uist' }], + swingsetPort: 4, vbankPort: 2, vibcPort: 3, }; From 2f702bdc15489119e28fda0a9cb93287ed7f8da7 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 20 Jul 2023 23:08:47 +0000 Subject: [PATCH 089/109] refactor(cosmos): add KVEntry helpers Use for swing-store export data Standardize Json handling of export data --- golang/cosmos/types/kv_entry_helpers.go | 220 ++++++++++++++++ golang/cosmos/types/kv_entry_helpers_test.go | 237 ++++++++++++++++++ .../swingset/keeper/extension_snapshotter.go | 15 +- .../keeper/swing_store_exports_handler.go | 35 ++- 4 files changed, 477 insertions(+), 30 deletions(-) create mode 100644 golang/cosmos/types/kv_entry_helpers.go create mode 100644 golang/cosmos/types/kv_entry_helpers_test.go diff --git a/golang/cosmos/types/kv_entry_helpers.go b/golang/cosmos/types/kv_entry_helpers.go new file mode 100644 index 00000000000..d6bd20b8e7a --- /dev/null +++ b/golang/cosmos/types/kv_entry_helpers.go @@ -0,0 +1,220 @@ +package types + +import ( + "encoding/json" + "fmt" + "io" + + vstoragetypes "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types" + sdk "github.com/cosmos/cosmos-sdk/types" +) + +// These helpers facilitate handling KVEntry streams, in particular for the +// swing-store "export data" use case. The goal is to avoid passing around +// large slices of key/value pairs. +// +// Handling of these streams is primarily accomplished through a KVEntryReader +// interface, with multiple implementations for different backing sources, as +// well as a helper function to consume a reader and write the entries into a +// byte Writer as line terminated json encoded KVEntry. 
+ +// We attempt to pass sdk.Iterator around as much as possible to abstract a +// stream of Key/Value pairs without requiring the whole slice to be held in +// memory if possible. Cosmos SDK defines iterators as yielding Key/Value +// pairs, both as byte slices. +// +// More precisely, we define here the following: +// - A KVEntryReader interface allowing to Read the KVEntry one by one from an +// underlying source. +// - Multiple implementations of the KVEntryReader interface: +// - NewKVIteratorReader constructs a reader which consumes an sdk.Iterator. +// Keys and values are converted from byte slices to strings, and nil values +// are preserved as KVEntry instances with no value. +// - A generic reader which uses a slice of key/value data, and a conversion +// function from that data type to a KVEntry. The reader does bounds +// checking and keeps track of the current position. The following data +// types are available: +// - NewVstorageDataEntriesReader constructs a reader from a slice of +// vstorage DataEntry values. +// - NewJsonRawMessageKVEntriesReader constructs a reader from a slice of +// [key: string, value?: string | null] JSON array values. +// - NewJsonlKVEntryDecoderReader constructs a reader from an io.ReadCloser +// (like a file) containing JSON Lines in which each item is a +// [key: string, value?: string | null] array. +// - EncodeKVEntryReaderToJsonl consumes a KVEntryReader and writes its entries +// into an io.Writer as a sequence of single-line JSON texts. The encoding of +// each line is [key, value] if the KVEntry has a value, and [key] otherwise. +// This format terminates each line, but is still compatible with JSON Lines +// (which is line feed *separated*) for Go and JS decoders. + +// KVEntryReader is an abstraction for iteratively reading KVEntry data. +type KVEntryReader interface { + // Read returns the next KVEntry, or an error. + // An `io.EOF` error indicates that the previous Read() returned the final KVEntry. 
+ Read() (KVEntry, error) + // Close frees the underlying resource (such as a slice or file descriptor). + Close() error +} + +var _ KVEntryReader = &kvIteratorReader{} + +// kvIteratorReader is a KVEntryReader backed by an sdk.Iterator +type kvIteratorReader struct { + iter sdk.Iterator +} + +// NewKVIteratorReader returns a KVEntryReader backed by an sdk.Iterator. +func NewKVIteratorReader(iter sdk.Iterator) KVEntryReader { + return &kvIteratorReader{ + iter: iter, + } +} + +// Read yields the next KVEntry from the source iterator +// Implements KVEntryReader +func (ir kvIteratorReader) Read() (next KVEntry, err error) { + if !ir.iter.Valid() { + // There is unfortunately no way to differentiate completion from iteration + // errors with the implementation of Iterators by cosmos-sdk since the + // iter.Error() returns an error in both cases + return KVEntry{}, io.EOF + } + + key := ir.iter.Key() + if len(key) == 0 { + return KVEntry{}, fmt.Errorf("nil or empty key yielded by iterator") + } + + value := ir.iter.Value() + ir.iter.Next() + if value == nil { + return NewKVEntryWithNoValue(string(key)), nil + } else { + return NewKVEntry(string(key), string(value)), nil + } +} + +func (ir kvIteratorReader) Close() error { + return ir.iter.Close() +} + +var _ KVEntryReader = &kvEntriesReader[any]{} + +// kvEntriesReader is the KVEntryReader using an underlying slice of generic +// kv entries. It reads from the slice sequentially using a type specific +// toKVEntry func, performing bounds checks, and tracking the position. 
+type kvEntriesReader[T any] struct { + entries []T + toKVEntry func(T) (KVEntry, error) + nextIndex int +} + +// Read yields the next KVEntry from the source +// Implements KVEntryReader +func (reader *kvEntriesReader[T]) Read() (next KVEntry, err error) { + if reader.entries == nil { + return KVEntry{}, fmt.Errorf("reader closed") + } + + length := len(reader.entries) + + if reader.nextIndex < length { + entry, err := reader.toKVEntry(reader.entries[reader.nextIndex]) + reader.nextIndex += 1 + if err != nil { + return KVEntry{}, err + } + if !entry.IsValidKey() { + return KVEntry{}, fmt.Errorf("source yielded a KVEntry with an invalid key") + } + return entry, err + } else if reader.nextIndex == length { + reader.nextIndex += 1 + return KVEntry{}, io.EOF + } else { + return KVEntry{}, fmt.Errorf("index %d is out of source bounds (length %d)", reader.nextIndex, length) + } +} + +// Close releases the source slice +// Implements KVEntryReader +func (reader *kvEntriesReader[any]) Close() error { + reader.entries = nil + return nil +} + +// NewVstorageDataEntriesReader creates a KVEntryReader backed by a +// vstorage DataEntry slice +func NewVstorageDataEntriesReader(vstorageDataEntries []*vstoragetypes.DataEntry) KVEntryReader { + return &kvEntriesReader[*vstoragetypes.DataEntry]{ + entries: vstorageDataEntries, + toKVEntry: func(sourceEntry *vstoragetypes.DataEntry) (KVEntry, error) { + return NewKVEntry(sourceEntry.Path, sourceEntry.Value), nil + }, + } +} + +// NewJsonRawMessageKVEntriesReader creates a KVEntryReader backed by +// a json.RawMessage slice +func NewJsonRawMessageKVEntriesReader(jsonEntries []json.RawMessage) KVEntryReader { + return &kvEntriesReader[json.RawMessage]{ + entries: jsonEntries, + toKVEntry: func(sourceEntry json.RawMessage) (entry KVEntry, err error) { + err = json.Unmarshal(sourceEntry, &entry) + return entry, err + }, + } +} + +var _ KVEntryReader = &jsonlKVEntryDecoderReader{} + +// jsonlKVEntryDecoderReader is the KVEntryReader 
decoding +// jsonl-like encoded key/value pairs. +type jsonlKVEntryDecoderReader struct { + closer io.Closer + decoder *json.Decoder +} + +// Read yields the next decoded KVEntry +// Implements KVEntryReader +func (reader jsonlKVEntryDecoderReader) Read() (next KVEntry, err error) { + err = reader.decoder.Decode(&next) + return next, err +} + +// Close release the underlying resource backing the decoder +// Implements KVEntryReader +func (reader jsonlKVEntryDecoderReader) Close() error { + return reader.closer.Close() +} + +// NewJsonlKVEntryDecoderReader creates a KVEntryReader over a byte +// stream reader that decodes each line as a json encoded KVEntry. The entries +// are yielded in order they're present in the stream. +func NewJsonlKVEntryDecoderReader(byteReader io.ReadCloser) KVEntryReader { + return &jsonlKVEntryDecoderReader{ + closer: byteReader, + decoder: json.NewDecoder(byteReader), + } +} + +// EncodeKVEntryReaderToJsonl consumes a KVEntryReader and JSON encodes each +// KVEntry, terminating by new lines. 
+// It will not Close the Reader when done +func EncodeKVEntryReaderToJsonl(reader KVEntryReader, bytesWriter io.Writer) (err error) { + encoder := json.NewEncoder(bytesWriter) + encoder.SetEscapeHTML(false) + for { + entry, err := reader.Read() + if err == io.EOF { + return nil + } else if err != nil { + return err + } + + err = encoder.Encode(entry) + if err != nil { + return err + } + } +} diff --git a/golang/cosmos/types/kv_entry_helpers_test.go b/golang/cosmos/types/kv_entry_helpers_test.go new file mode 100644 index 00000000000..3037b5f024d --- /dev/null +++ b/golang/cosmos/types/kv_entry_helpers_test.go @@ -0,0 +1,237 @@ +package types + +import ( + "bytes" + "errors" + "io" + "strings" + "testing" + + sdk "github.com/cosmos/cosmos-sdk/types" +) + +func toKVEntryIdentity(entry KVEntry) (KVEntry, error) { + return entry, nil +} + +func toKVEntryError(err error) (KVEntry, error) { + return KVEntry{}, err +} + +func checkSameKVEntry(t *testing.T, got KVEntry, expected KVEntry) { + if got.key != expected.key { + t.Errorf("got key %s, expected key %s", got.key, expected.key) + } + if got.value == nil && expected.value != nil { + t.Errorf("got nil value, expected string %s", *expected.value) + } else if got.value != nil && expected.value == nil { + t.Errorf("got string value %s, expected nil", *got.value) + } else if got.value != nil && expected.value != nil { + if *got.value != *expected.value { + t.Errorf("got string value %s, expected %s", *got.value, *expected.value) + } + } +} + +func TestKVEntriesReaderNormal(t *testing.T) { + source := []KVEntry{NewKVEntry("foo", "bar"), NewKVEntryWithNoValue("baz")} + reader := kvEntriesReader[KVEntry]{entries: source, toKVEntry: toKVEntryIdentity} + + got1, err := reader.Read() + if err != nil { + t.Errorf("unexpected error: %v", err) + } + checkSameKVEntry(t, got1, source[0]) + + got2, err := reader.Read() + if err != nil { + t.Errorf("unexpected error: %v", err) + } + checkSameKVEntry(t, got2, source[1]) + + _, err = 
reader.Read() + if err != io.EOF { + t.Errorf("expected error io.EOF, got %v", err) + } + + _, err = reader.Read() + if err == nil || !strings.Contains(err.Error(), "bounds") { + t.Errorf("expected out of bounds error, got %v", err) + } + + err = reader.Close() + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + _, err = reader.Read() + if err == nil || !strings.Contains(err.Error(), "reader closed") { + t.Errorf("expected reader closed error, got %v", err) + } +} + +func TestKVEntriesReaderErrors(t *testing.T) { + source := []error{errors.New("foo"), errors.New("bar")} + reader := kvEntriesReader[error]{entries: source, toKVEntry: toKVEntryError} + + _, err := reader.Read() + if err != source[0] { + t.Errorf("got error %v, expected error %v", err, source[0]) + } + + // Nothing in the reader prevents reading after previous errors + _, err = reader.Read() + if err != source[1] { + t.Errorf("got error %v, expected error %v", err, source[1]) + } + + _, err = reader.Read() + if err != io.EOF { + t.Errorf("expected error io.EOF, got %v", err) + } +} + +type kvEntryReaderIterator struct { + reader KVEntryReader + current KVEntry + err error +} + +// newKVEntryReaderIterator creates an iterator over a KVEntryReader. +// KVEntry keys and values are reported as []byte from the reader in order. +func newKVEntryReaderIterator(reader KVEntryReader) sdk.Iterator { + iter := &kvEntryReaderIterator{ + reader: reader, + } + iter.Next() + return iter +} + +// Domain implements sdk.Iterator +func (iter *kvEntryReaderIterator) Domain() (start []byte, end []byte) { + return nil, nil +} + +// Valid returns whether the current iterator is valid. Once invalid, the +// Iterator remains invalid forever. +func (iter *kvEntryReaderIterator) Valid() bool { + if iter.err == io.EOF { + return false + } else if iter.err != nil { + panic(iter.err) + } + return true +} + +// checkValid implements the validity invariants of sdk.Iterator methods. 
+func (iter *kvEntryReaderIterator) checkValid() { + if !iter.Valid() { + panic("invalid iterator") + } +} + +// Next moves the iterator to the next entry from the reader. +// If Valid() returns false, this method will panic. +func (iter *kvEntryReaderIterator) Next() { + iter.checkValid() + + iter.current, iter.err = iter.reader.Read() +} + +// Key returns the key at the current position. Panics if the iterator is invalid. +// CONTRACT: key readonly []byte +func (iter *kvEntryReaderIterator) Key() (key []byte) { + iter.checkValid() + + return []byte(iter.current.Key()) +} + +// Value returns the value at the current position. Panics if the iterator is invalid. +// CONTRACT: value readonly []byte +func (iter *kvEntryReaderIterator) Value() (value []byte) { + iter.checkValid() + + if !iter.current.HasValue() { + return nil + } else { + return []byte(iter.current.StringValue()) + } +} + +// Error returns the last error encountered by the iterator, if any. +func (iter *kvEntryReaderIterator) Error() error { + err := iter.err + if err == io.EOF { + return nil + } + + return err +} + +// Close closes the iterator, releasing any allocated resources. 
+func (iter *kvEntryReaderIterator) Close() error { + return iter.reader.Close() +} + +func TestKVIteratorReader(t *testing.T) { + source := []KVEntry{NewKVEntry("foo", "bar"), NewKVEntryWithNoValue("baz")} + iterator := newKVEntryReaderIterator(&kvEntriesReader[KVEntry]{entries: source, toKVEntry: toKVEntryIdentity}) + reader := NewKVIteratorReader(iterator) + + got1, err := reader.Read() + if err != nil { + t.Errorf("unexpected error: %v", err) + } + checkSameKVEntry(t, got1, source[0]) + + got2, err := reader.Read() + if err != nil { + t.Errorf("unexpected error: %v", err) + } + checkSameKVEntry(t, got2, source[1]) + + _, err = reader.Read() + if err != io.EOF { + t.Errorf("expected error io.EOF, got %v", err) + } + + err = reader.Close() + if err != nil { + t.Errorf("unexpected error: %v", err) + } +} + +func TestJsonlEncodeAndReadBack(t *testing.T) { + source := []KVEntry{NewKVEntry("foo", "bar"), NewKVEntryWithNoValue("baz")} + sourceReader := &kvEntriesReader[KVEntry]{entries: source, toKVEntry: toKVEntryIdentity} + + var encodedKVEntries bytes.Buffer + err := EncodeKVEntryReaderToJsonl(sourceReader, &encodedKVEntries) + if err != nil { + t.Errorf("unexpected encode error %v", err) + } + + jsonlReader := NewJsonlKVEntryDecoderReader(io.NopCloser(&encodedKVEntries)) + + got1, err := jsonlReader.Read() + if err != nil { + t.Errorf("unexpected error: %v", err) + } + checkSameKVEntry(t, got1, source[0]) + + got2, err := jsonlReader.Read() + if err != nil { + t.Errorf("unexpected error: %v", err) + } + checkSameKVEntry(t, got2, source[1]) + + _, err = jsonlReader.Read() + if err != io.EOF { + t.Errorf("expected error io.EOF, got %v", err) + } + + err = jsonlReader.Close() + if err != nil { + t.Errorf("unexpected error: %v", err) + } +} diff --git a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go index 920c3a3228e..c7700cc84bd 100644 --- a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go 
+++ b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go @@ -2,12 +2,12 @@ package keeper import ( "bytes" - "encoding/json" "errors" "fmt" "io" "math" + agoric "github.com/Agoric/agoric-sdk/golang/cosmos/types" "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types" vstoragetypes "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types" "github.com/cosmos/cosmos-sdk/baseapp" @@ -249,6 +249,8 @@ func (snapshotter *ExtensionSnapshotter) OnExportRetrieved(provider SwingStoreEx if len(swingStoreExportDataEntries) == 0 { return nil } + exportDataReader := agoric.NewVstorageDataEntriesReader(swingStoreExportDataEntries) + defer exportDataReader.Close() // For debugging, write out any retrieved export data as a single untrusted artifact // which has the same encoding as the internal SwingStore export data representation: @@ -256,14 +258,9 @@ func (snapshotter *ExtensionSnapshotter) OnExportRetrieved(provider SwingStoreEx exportDataArtifact := types.SwingStoreArtifact{Name: UntrustedExportDataArtifactName} var encodedExportData bytes.Buffer - encoder := json.NewEncoder(&encodedExportData) - encoder.SetEscapeHTML(false) - for _, dataEntry := range swingStoreExportDataEntries { - entry := []string{dataEntry.Path, dataEntry.Value} - err := encoder.Encode(entry) - if err != nil { - return err - } + err = agoric.EncodeKVEntryReaderToJsonl(exportDataReader, &encodedExportData) + if err != nil { + return err } exportDataArtifact.Data = encodedExportData.Bytes() diff --git a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go index 4b558b09df9..b2aa4bacb7d 100644 --- a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go +++ b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go @@ -9,6 +9,7 @@ import ( "path/filepath" "regexp" + agoric "github.com/Agoric/agoric-sdk/golang/cosmos/types" "github.com/Agoric/agoric-sdk/golang/cosmos/vm" 
"github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types" vstoragetypes "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types" @@ -625,26 +626,22 @@ func (exportsHandler SwingStoreExportsHandler) retrieveExport(onExportRetrieved if err != nil { return nil, err } - defer dataFile.Close() + exportDataReader := agoric.NewJsonlKVEntryDecoderReader(dataFile) + defer exportDataReader.Close() - decoder := json.NewDecoder(dataFile) for { - var jsonEntry []string - err = decoder.Decode(&jsonEntry) + entry, err := exportDataReader.Read() if err == io.EOF { - break + return entries, nil } else if err != nil { - return nil, err + return []*vstoragetypes.DataEntry{}, err } - - if len(jsonEntry) != 2 { - return nil, fmt.Errorf("invalid export data entry (length %d)", len(jsonEntry)) + if !entry.HasValue() { + return []*vstoragetypes.DataEntry{}, fmt.Errorf("export data entry must have value") } - entry := vstoragetypes.DataEntry{Path: jsonEntry[0], Value: jsonEntry[1]} - entries = append(entries, &entry) + dataEntry := vstoragetypes.DataEntry{Path: entry.Key(), Value: entry.StringValue()} + entries = append(entries, &dataEntry) } - - return entries, nil } nextArtifact := 0 @@ -737,14 +734,10 @@ func (exportsHandler SwingStoreExportsHandler) RestoreExport(provider SwingStore } defer exportDataFile.Close() - encoder := json.NewEncoder(exportDataFile) - encoder.SetEscapeHTML(false) - for _, dataEntry := range exportDataEntries { - entry := []string{dataEntry.Path, dataEntry.Value} - err := encoder.Encode(entry) - if err != nil { - return err - } + exportDataReader := agoric.NewVstorageDataEntriesReader(exportDataEntries) + err = agoric.EncodeKVEntryReaderToJsonl(exportDataReader, exportDataFile) + if err != nil { + return err } err = exportDataFile.Sync() From c037ea3931877fe4d56df5b82cc7c3eb77a84a53 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Wed, 12 Jul 2023 23:27:16 +0000 Subject: [PATCH 090/109] fix(x/swingset): switch export/import to replay artifact level 
Effectively includes historical artifacts in state-sync mitigates the lack of full transcripts for current vat incarnations --- golang/cosmos/x/swingset/keeper/extension_snapshotter.go | 4 ++-- packages/cosmic-swingset/src/export-kernel-db.js | 3 ++- packages/cosmic-swingset/src/import-kernel-db.js | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go index 5991c52fd20..e067d5112e9 100644 --- a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go @@ -125,7 +125,7 @@ func (snapshotter *ExtensionSnapshotter) InitiateSnapshot(height int64) error { blockHeight := uint64(height) return snapshotter.swingStoreExportsHandler.InitiateExport(blockHeight, snapshotter, SwingStoreExportOptions{ - ArtifactMode: SwingStoreArtifactModeOperational, + ArtifactMode: SwingStoreArtifactModeReplay, ExportDataMode: SwingStoreExportDataModeSkip, }) } @@ -304,6 +304,6 @@ func (snapshotter *ExtensionSnapshotter) RestoreExtension(blockHeight uint64, fo return snapshotter.swingStoreExportsHandler.RestoreExport( SwingStoreExportProvider{BlockHeight: blockHeight, GetExportDataReader: getExportDataReader, ReadNextArtifact: readNextArtifact}, - SwingStoreRestoreOptions{ArtifactMode: SwingStoreArtifactModeOperational}, + SwingStoreRestoreOptions{ArtifactMode: SwingStoreArtifactModeReplay}, ) } diff --git a/packages/cosmic-swingset/src/export-kernel-db.js b/packages/cosmic-swingset/src/export-kernel-db.js index f6a89ece00b..c8be41916f1 100755 --- a/packages/cosmic-swingset/src/export-kernel-db.js +++ b/packages/cosmic-swingset/src/export-kernel-db.js @@ -41,11 +41,12 @@ export const ExportManifestFileName = 'export-manifest.json'; */ export const getEffectiveArtifactMode = artifactMode => { switch (artifactMode) { - case undefined: case 'none': case 'operational': return 'operational'; + case undefined: 
case 'replay': + return 'replay'; case 'archival': case 'debug': return artifactMode; diff --git a/packages/cosmic-swingset/src/import-kernel-db.js b/packages/cosmic-swingset/src/import-kernel-db.js index 9f347041b8b..8ab3c402db0 100755 --- a/packages/cosmic-swingset/src/import-kernel-db.js +++ b/packages/cosmic-swingset/src/import-kernel-db.js @@ -62,7 +62,7 @@ const checkAndGetImportSwingStoreOptions = (options, manifest) => { manifest.data || Fail`State-sync manifest missing export data`; - const { artifactMode = manifest.artifactMode || 'debug' } = options; + const { artifactMode = manifest.artifactMode || 'replay' } = options; if (artifactMode === 'none') { throw Fail`Cannot import "export data" without at least "operational" artifacts`; From 3336b62fbb10bd2293a832f8c30e590530d14213 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Mon, 10 Jul 2023 21:37:38 +0000 Subject: [PATCH 091/109] feat(cosmic-swingset): use x/swingset for swing-store export data --- docs/architecture/state-sync.md | 13 +++- golang/cosmos/app/app.go | 9 ++- golang/cosmos/x/swingset/keeper/keeper.go | 12 ++- .../cosmos/x/swingset/keeper/keeper_test.go | 77 +++++++++++++++++++ golang/cosmos/x/swingset/swingset.go | 31 ++++++++ packages/cosmic-swingset/src/chain-main.js | 10 +-- 6 files changed, 136 insertions(+), 16 deletions(-) diff --git a/docs/architecture/state-sync.md b/docs/architecture/state-sync.md index 06b860bea80..5d403707586 100644 --- a/docs/architecture/state-sync.md +++ b/docs/architecture/state-sync.md @@ -58,9 +58,14 @@ sequenceDiagram TM->>+A-M: EndBlock A-M->>+CM: END_BLOCK CM->>CM: runKernel() - CM-)A-M: vstorage->setWithoutNotify(prefixedExportDataEntries) + CM-)A-M: swingset->swingStoreUpdateExportData(exportDataEntries) + A-M->>A-M: swingStore := NewPrefixStore("swingStore.") loop each data entry - A-M->>+MS-M: vstorage.SetStorage() + alt has value + A-M->>+MS-M: swingStore.Set(key, value) + else no value + A-M->>+MS-M: swingStore.Delete(key) + end MS-M-->>-A-M: 
end CM-->>-A-M: @@ -247,8 +252,8 @@ sequenceDiagram SSEH-CS->>+D-CS: MkDir(exportDir) D-CS-->>-SSEH-CS: SSEH-CS->>+SSES-CS: provider.GetExportDataReader() - SSES-CS->>+MS-CS: ExportStorageFromPrefix
("swingStore.") - MS-CS-->>-SSES-CS: vstorage data entries + SSES-CS->>MS-CS: PrefixStore.Iterator()
("swingStore.") + MS-CS--)SSES-CS: sdk.Iterator SSES-CS--)-SSEH-CS: export data reader loop each data entry SSEH-CS->>+D-CS: Append(export-data.jsonl,
"JSON(entry tuple)\n") diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index b0b9c6b30cd..9668dfd37eb 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -479,11 +479,12 @@ func NewAgoricApp( getSwingStoreExportDataShadowCopyReader := func(height int64) agorictypes.KVEntryReader { ctx := app.NewUncachedContext(false, tmproto.Header{Height: height}) - exportDataEntries := app.SwingSetKeeper.ExportSwingStore(ctx) - if len(exportDataEntries) == 0 { + exportDataIterator := app.SwingSetKeeper.GetSwingStore(ctx).Iterator(nil, nil) + if !exportDataIterator.Valid() { + exportDataIterator.Close() return nil } - return agorictypes.NewVstorageDataEntriesReader(exportDataEntries) + return agorictypes.NewKVIteratorReader(exportDataIterator) } app.SwingSetSnapshotter = *swingsetkeeper.NewExtensionSnapshotter( bApp, @@ -816,6 +817,8 @@ func upgrade11Handler(app *GaiaApp, targetUpgrade string) func(sdk.Context, upgr // Record the plan to send to SwingSet app.upgradePlan = &plan + // TODO: Migrate x/vstorage swingStore to x/swingset SwingStore + // Always run module migrations mvm, err := app.mm.RunMigrations(ctx, app.configurator, fromVm) if err != nil { diff --git a/golang/cosmos/x/swingset/keeper/keeper.go b/golang/cosmos/x/swingset/keeper/keeper.go index 00f4191a8dd..29139e8e5fd 100644 --- a/golang/cosmos/x/swingset/keeper/keeper.go +++ b/golang/cosmos/x/swingset/keeper/keeper.go @@ -11,6 +11,7 @@ import ( "github.com/cosmos/cosmos-sdk/baseapp" "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/store/prefix" sdk "github.com/cosmos/cosmos-sdk/types" bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" @@ -20,7 +21,6 @@ import ( "github.com/Agoric/agoric-sdk/golang/cosmos/vm" "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types" vstoragekeeper "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/keeper" - vstoragetypes 
"github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types" ) // Top-level paths for chain storage should remain synchronized with @@ -37,7 +37,10 @@ const ( StoragePathSwingStore = "swingStore" ) -const stateKey string = "state" +const ( + stateKey = "state" + swingStoreKeyPrefix = "swingStore." +) // Contextual information about the message source of an action on an inbound queue. // This context should be unique per inboundQueueRecord. @@ -435,8 +438,9 @@ func (k Keeper) SetMailbox(ctx sdk.Context, peer string, mailbox string) { k.vstorageKeeper.LegacySetStorageAndNotify(ctx, agoric.NewKVEntry(path, mailbox)) } -func (k Keeper) ExportSwingStore(ctx sdk.Context) []*vstoragetypes.DataEntry { - return k.vstorageKeeper.ExportStorageFromPrefix(ctx, StoragePathSwingStore) +func (k Keeper) GetSwingStore(ctx sdk.Context) sdk.KVStore { + store := ctx.KVStore(k.storeKey) + return prefix.NewStore(store, []byte(swingStoreKeyPrefix)) } func (k Keeper) PathToEncodedKey(path string) []byte { diff --git a/golang/cosmos/x/swingset/keeper/keeper_test.go b/golang/cosmos/x/swingset/keeper/keeper_test.go index 15b2fc21775..643b8dd0b79 100644 --- a/golang/cosmos/x/swingset/keeper/keeper_test.go +++ b/golang/cosmos/x/swingset/keeper/keeper_test.go @@ -1,10 +1,17 @@ package keeper import ( + "fmt" + "reflect" "testing" "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types" + "github.com/cosmos/cosmos-sdk/store" + prefixstore "github.com/cosmos/cosmos-sdk/store/prefix" + storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" + + dbm "github.com/tendermint/tm-db" ) func mkcoin(denom string) func(amt int64) sdk.Coin { @@ -181,3 +188,73 @@ func Test_calculateFees(t *testing.T) { }) } } + +var ( + swingsetStoreKey = storetypes.NewKVStoreKey(types.StoreKey) +) + +func makeTestStore() sdk.KVStore { + db := dbm.NewMemDB() + ms := store.NewCommitMultiStore(db) + ms.MountStoreWithDB(swingsetStoreKey, sdk.StoreTypeIAVL, db) + err := 
ms.LoadLatestVersion() + if err != nil { + panic(err) + } + kvStore := ms.GetKVStore(swingsetStoreKey) + prefixStore := prefixstore.NewStore(kvStore, []byte("swingStore.")) + return prefixStore +} + +func TestSwingStore(t *testing.T) { + store := makeTestStore() + + // Test that we can store and retrieve a value. + store.Set([]byte("someKey"), []byte("someValue")) + if got := string(store.Get([]byte("someKey"))); got != "someValue" { + t.Errorf("got %q, want %q", got, "someValue") + } + + // Test that we can update and retrieve an updated value. + store.Set([]byte("someKey"), []byte("someNewValue")) + if got := string(store.Get([]byte("someKey"))); got != "someNewValue" { + t.Errorf("got %q, want %q", got, "someNewValue") + } + + // Test that we can store and retrieve empty values + store.Set([]byte("someEmptyKey"), []byte("")) + if got := store.Get([]byte("someEmptyKey")); got == nil || string(got) != "" { + t.Errorf("got %#v, want empty string", got) + } + + // Test that we can store and delete values. + store.Set([]byte("someOtherKey"), []byte("someOtherValue")) + store.Delete([]byte("someOtherKey")) + if store.Has([]byte("someOtherKey")) { + t.Errorf("has value, expected not") + } + + // Test that we can delete non existing keys (e.g. 
delete twice) + store.Delete([]byte("someMissingKey")) + + // Check the iterated values + expectedEntries := [][2]string{ + {"someEmptyKey", "[]byte{}"}, + {"someKey", "[]byte{0x73, 0x6f, 0x6d, 0x65, 0x4e, 0x65, 0x77, 0x56, 0x61, 0x6c, 0x75, 0x65}"}, + } + + iter := store.Iterator(nil, nil) + gotEntries := [][2]string{} + for ; iter.Valid(); iter.Next() { + entry := [2]string{ + string(iter.Key()), + fmt.Sprintf("%#v", iter.Value()), + } + gotEntries = append(gotEntries, entry) + } + iter.Close() + + if !reflect.DeepEqual(gotEntries, expectedEntries) { + t.Errorf("got export %q, want %q", gotEntries, expectedEntries) + } +} diff --git a/golang/cosmos/x/swingset/swingset.go b/golang/cosmos/x/swingset/swingset.go index 87af999fc5d..8281e938932 100644 --- a/golang/cosmos/x/swingset/swingset.go +++ b/golang/cosmos/x/swingset/swingset.go @@ -3,8 +3,11 @@ package swingset import ( "encoding/json" "fmt" + "io" + agoric "github.com/Agoric/agoric-sdk/golang/cosmos/types" "github.com/Agoric/agoric-sdk/golang/cosmos/vm" + sdk "github.com/cosmos/cosmos-sdk/types" ) // portHandler implements vm.PortHandler @@ -18,6 +21,10 @@ type swingsetMessage struct { Args []json.RawMessage `json:"args"` } +const ( + SwingStoreUpdateExportData = "swingStoreUpdateExportData" +) + // NewPortHandler returns a port handler for a swingset Keeper. 
func NewPortHandler(k Keeper) vm.PortHandler { return portHandler{keeper: k} @@ -34,7 +41,31 @@ func (ph portHandler) Receive(ctx *vm.ControllerContext, str string) (string, er } switch msg.Method { + case SwingStoreUpdateExportData: + return ph.handleSwingStoreUpdateExportData(ctx.Context, msg.Args) + default: return "", fmt.Errorf("unrecognized swingset method %s", msg.Method) } } + +func (ph portHandler) handleSwingStoreUpdateExportData(ctx sdk.Context, entries []json.RawMessage) (ret string, err error) { + store := ph.keeper.GetSwingStore(ctx) + exportDataReader := agoric.NewJsonRawMessageKVEntriesReader(entries) + defer exportDataReader.Close() + for { + entry, err := exportDataReader.Read() + if err == io.EOF { + return "true", nil + } else if err != nil { + return ret, err + } + + key := []byte(entry.Key()) + if !entry.HasValue() { + store.Delete(key) + } else { + store.Set(key, []byte(entry.StringValue())) + } + } +} diff --git a/packages/cosmic-swingset/src/chain-main.js b/packages/cosmic-swingset/src/chain-main.js index 9ed3e4ab362..8082db3dc67 100644 --- a/packages/cosmic-swingset/src/chain-main.js +++ b/packages/cosmic-swingset/src/chain-main.js @@ -298,20 +298,20 @@ export default async function main(progname, args, { env, homedir, agcc }) { if (typeof key !== 'string') { throw Fail`Unexpected swingStore exported key ${q(key)}`; } - const path = `${STORAGE_PATH.SWING_STORE}.${key}`; if (value == null) { - return [path]; + return [key]; } if (typeof value !== 'string') { throw Fail`Unexpected ${typeof value} value for swingStore exported key ${q( key, )}`; } - return [path, value]; + return [key, value]; }); - sendToChainStorage( + chainSend( + portNums.swingset, stringify({ - method: 'setWithoutNotify', + method: 'swingStoreUpdateExportData', args: entries, }), ); From 13078a2f3e6a586a81fa40dcb0387fcfde7b3c04 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Wed, 9 Aug 2023 02:01:06 +0000 Subject: [PATCH 092/109] refactor(x/swingset): switch to 
KVEntryReader for export data --- docs/architecture/state-sync.md | 4 +- golang/cosmos/app/app.go | 12 ++++-- .../swingset/keeper/extension_snapshotter.go | 38 +++++++++--------- .../keeper/swing_store_exports_handler.go | 40 ++++++------------- 4 files changed, 41 insertions(+), 53 deletions(-) diff --git a/docs/architecture/state-sync.md b/docs/architecture/state-sync.md index aa9f64f9fe4..06b860bea80 100644 --- a/docs/architecture/state-sync.md +++ b/docs/architecture/state-sync.md @@ -246,10 +246,10 @@ sequenceDiagram SSEH-CS->>SSEH-CS: activeOperation = operationDetails{} SSEH-CS->>+D-CS: MkDir(exportDir) D-CS-->>-SSEH-CS: - SSEH-CS->>+SSES-CS: provider.GetExportData() + SSEH-CS->>+SSES-CS: provider.GetExportDataReader() SSES-CS->>+MS-CS: ExportStorageFromPrefix
("swingStore.") MS-CS-->>-SSES-CS: vstorage data entries - SSES-CS-->>-SSEH-CS: + SSES-CS--)-SSEH-CS: export data reader loop each data entry SSEH-CS->>+D-CS: Append(export-data.jsonl,
"JSON(entry tuple)\n") D-CS-->>-SSEH-CS: diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index 76208f7c976..07779ef0916 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -108,6 +108,7 @@ import ( gaiaappparams "github.com/Agoric/agoric-sdk/golang/cosmos/app/params" appante "github.com/Agoric/agoric-sdk/golang/cosmos/ante" + agorictypes "github.com/Agoric/agoric-sdk/golang/cosmos/types" "github.com/Agoric/agoric-sdk/golang/cosmos/vm" "github.com/Agoric/agoric-sdk/golang/cosmos/x/lien" "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset" @@ -118,7 +119,6 @@ import ( vbanktypes "github.com/Agoric/agoric-sdk/golang/cosmos/x/vbank/types" "github.com/Agoric/agoric-sdk/golang/cosmos/x/vibc" "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage" - vstoragetypes "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types" // unnamed import of statik for swagger UI support _ "github.com/cosmos/cosmos-sdk/client/docs/statik" @@ -475,14 +475,18 @@ func NewAgoricApp( }, ) - getSwingStoreExportDataShadowCopy := func(height int64) []*vstoragetypes.DataEntry { + getSwingStoreExportDataShadowCopyReader := func(height int64) agorictypes.KVEntryReader { ctx := app.NewUncachedContext(false, tmproto.Header{Height: height}) - return app.SwingSetKeeper.ExportSwingStore(ctx) + exportDataEntries := app.SwingSetKeeper.ExportSwingStore(ctx) + if len(exportDataEntries) == 0 { + return nil + } + return agorictypes.NewVstorageDataEntriesReader(exportDataEntries) } app.SwingSetSnapshotter = *swingsetkeeper.NewExtensionSnapshotter( bApp, &app.SwingStoreExportsHandler, - getSwingStoreExportDataShadowCopy, + getSwingStoreExportDataShadowCopyReader, ) app.VibcKeeper = vibc.NewKeeper( diff --git a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go index c7700cc84bd..8e4c1fc0f2e 100644 --- a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go +++ 
b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go @@ -9,7 +9,6 @@ import ( agoric "github.com/Agoric/agoric-sdk/golang/cosmos/types" "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types" - vstoragetypes "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types" "github.com/cosmos/cosmos-sdk/baseapp" snapshots "github.com/cosmos/cosmos-sdk/snapshots/types" "github.com/tendermint/tendermint/libs/log" @@ -65,26 +64,26 @@ type snapshotDetails struct { type ExtensionSnapshotter struct { isConfigured func() bool // takeAppSnapshot is called by OnExportStarted when creating a snapshot - takeAppSnapshot func(height int64) - swingStoreExportsHandler *SwingStoreExportsHandler - getSwingStoreExportDataShadowCopy func(height int64) []*vstoragetypes.DataEntry - logger log.Logger - activeSnapshot *snapshotDetails + takeAppSnapshot func(height int64) + swingStoreExportsHandler *SwingStoreExportsHandler + getSwingStoreExportDataShadowCopyReader func(height int64) agoric.KVEntryReader + logger log.Logger + activeSnapshot *snapshotDetails } // NewExtensionSnapshotter creates a new swingset ExtensionSnapshotter func NewExtensionSnapshotter( app *baseapp.BaseApp, swingStoreExportsHandler *SwingStoreExportsHandler, - getSwingStoreExportDataShadowCopy func(height int64) []*vstoragetypes.DataEntry, + getSwingStoreExportDataShadowCopyReader func(height int64) agoric.KVEntryReader, ) *ExtensionSnapshotter { return &ExtensionSnapshotter{ - isConfigured: func() bool { return app.SnapshotManager() != nil }, - takeAppSnapshot: app.Snapshot, - logger: app.Logger().With("module", fmt.Sprintf("x/%s", types.ModuleName), "submodule", "extension snapshotter"), - swingStoreExportsHandler: swingStoreExportsHandler, - getSwingStoreExportDataShadowCopy: getSwingStoreExportDataShadowCopy, - activeSnapshot: nil, + isConfigured: func() bool { return app.SnapshotManager() != nil }, + takeAppSnapshot: app.Snapshot, + logger: app.Logger().With("module", fmt.Sprintf("x/%s", 
types.ModuleName), "submodule", "extension snapshotter"), + swingStoreExportsHandler: swingStoreExportsHandler, + getSwingStoreExportDataShadowCopyReader: getSwingStoreExportDataShadowCopyReader, + activeSnapshot: nil, } } @@ -242,14 +241,13 @@ func (snapshotter *ExtensionSnapshotter) OnExportRetrieved(provider SwingStoreEx } } - swingStoreExportDataEntries, err := provider.GetExportData() + exportDataReader, err := provider.GetExportDataReader() if err != nil { return err } - if len(swingStoreExportDataEntries) == 0 { + if exportDataReader == nil { return nil } - exportDataReader := agoric.NewVstorageDataEntriesReader(swingStoreExportDataEntries) defer exportDataReader.Close() // For debugging, write out any retrieved export data as a single untrusted artifact @@ -289,9 +287,9 @@ func (snapshotter *ExtensionSnapshotter) RestoreExtension(blockHeight uint64, fo // At this point the content of the cosmos DB has been verified against the // AppHash, which means the SwingStore data it contains can be used as the // trusted root against which to validate the artifacts. 
- getExportData := func() ([]*vstoragetypes.DataEntry, error) { - exportData := snapshotter.getSwingStoreExportDataShadowCopy(height) - return exportData, nil + getExportDataReader := func() (agoric.KVEntryReader, error) { + exportDataReader := snapshotter.getSwingStoreExportDataShadowCopyReader(height) + return exportDataReader, nil } readNextArtifact := func() (artifact types.SwingStoreArtifact, err error) { @@ -305,7 +303,7 @@ func (snapshotter *ExtensionSnapshotter) RestoreExtension(blockHeight uint64, fo } return snapshotter.swingStoreExportsHandler.RestoreExport( - SwingStoreExportProvider{BlockHeight: blockHeight, GetExportData: getExportData, ReadNextArtifact: readNextArtifact}, + SwingStoreExportProvider{BlockHeight: blockHeight, GetExportDataReader: getExportDataReader, ReadNextArtifact: readNextArtifact}, SwingStoreRestoreOptions{IncludeHistorical: false}, ) } diff --git a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go index b2aa4bacb7d..a0c34268102 100644 --- a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go +++ b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go @@ -12,7 +12,6 @@ import ( agoric "github.com/Agoric/agoric-sdk/golang/cosmos/types" "github.com/Agoric/agoric-sdk/golang/cosmos/vm" "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types" - vstoragetypes "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types" sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" "github.com/tendermint/tendermint/libs/log" ) @@ -178,8 +177,8 @@ type SwingStoreExportOptions struct { // packages/swing-store/src/swingStore.js makeSwingStoreExporter ExportMode string `json:"exportMode,omitempty"` // A flag indicating whether "export data" should be part of the swing-store export - // If false, the resulting SwingStoreExportProvider's GetExportData will - // return an empty list of "export data" entries. 
+ // If false, the resulting SwingStoreExportProvider's GetExportDataReader + // will return nil IncludeExportData bool `json:"includeExportData,omitempty"` } @@ -365,8 +364,9 @@ func checkNotActive() error { type SwingStoreExportProvider struct { // BlockHeight is the block height of the SwingStore export. BlockHeight uint64 - // GetExportData is a function to return the "export data" of the SwingStore export, if any. - GetExportData func() ([]*vstoragetypes.DataEntry, error) + // GetExportDataReader returns a KVEntryReader for the "export data" of the + // SwingStore export, or nil if the "export data" is not part of this export. + GetExportDataReader func() (agoric.KVEntryReader, error) // ReadNextArtifact is a function to return the next unread artifact in the SwingStore export. // It errors with io.EOF upon reaching the end of the list of available artifacts. ReadNextArtifact func() (types.SwingStoreArtifact, error) @@ -616,10 +616,9 @@ func (exportsHandler SwingStoreExportsHandler) retrieveExport(onExportRetrieved return fmt.Errorf("export manifest blockHeight (%d) doesn't match (%d)", manifest.BlockHeight, blockHeight) } - getExportData := func() ([]*vstoragetypes.DataEntry, error) { - entries := []*vstoragetypes.DataEntry{} + getExportDataReader := func() (agoric.KVEntryReader, error) { if manifest.Data == "" { - return entries, nil + return nil, nil } dataFile, err := os.Open(filepath.Join(exportDir, manifest.Data)) @@ -627,21 +626,7 @@ func (exportsHandler SwingStoreExportsHandler) retrieveExport(onExportRetrieved return nil, err } exportDataReader := agoric.NewJsonlKVEntryDecoderReader(dataFile) - defer exportDataReader.Close() - - for { - entry, err := exportDataReader.Read() - if err == io.EOF { - return entries, nil - } else if err != nil { - return []*vstoragetypes.DataEntry{}, err - } - if !entry.HasValue() { - return []*vstoragetypes.DataEntry{}, fmt.Errorf("export data entry must have value") - } - dataEntry := vstoragetypes.DataEntry{Path: 
entry.Key(), Value: entry.StringValue()} - entries = append(entries, &dataEntry) - } + return exportDataReader, nil } nextArtifact := 0 @@ -667,7 +652,7 @@ func (exportsHandler SwingStoreExportsHandler) retrieveExport(onExportRetrieved return artifact, err } - err = onExportRetrieved(SwingStoreExportProvider{BlockHeight: manifest.BlockHeight, GetExportData: getExportData, ReadNextArtifact: readNextArtifact}) + err = onExportRetrieved(SwingStoreExportProvider{BlockHeight: manifest.BlockHeight, GetExportDataReader: getExportDataReader, ReadNextArtifact: readNextArtifact}) if err != nil { return err } @@ -721,12 +706,14 @@ func (exportsHandler SwingStoreExportsHandler) RestoreExport(provider SwingStore BlockHeight: blockHeight, } - exportDataEntries, err := provider.GetExportData() + exportDataReader, err := provider.GetExportDataReader() if err != nil { return err } - if len(exportDataEntries) > 0 { + if exportDataReader != nil { + defer exportDataReader.Close() + manifest.Data = exportDataFilename exportDataFile, err := os.OpenFile(filepath.Join(exportDir, exportDataFilename), os.O_CREATE|os.O_WRONLY, exportedFilesMode) if err != nil { @@ -734,7 +721,6 @@ func (exportsHandler SwingStoreExportsHandler) RestoreExport(provider SwingStore } defer exportDataFile.Close() - exportDataReader := agoric.NewVstorageDataEntriesReader(exportDataEntries) err = agoric.EncodeKVEntryReaderToJsonl(exportDataReader, exportDataFile) if err != nil { return err From ee041db847975678e10b42ede8405210373fa5d6 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Tue, 11 Jul 2023 17:06:05 +0000 Subject: [PATCH 093/109] chore(x/vstorage): add remove entries method --- golang/cosmos/x/vstorage/keeper/keeper.go | 48 +++++++++++++++++++ .../cosmos/x/vstorage/keeper/keeper_test.go | 18 +++++++ 2 files changed, 66 insertions(+) diff --git a/golang/cosmos/x/vstorage/keeper/keeper.go b/golang/cosmos/x/vstorage/keeper/keeper.go index cc0e9d3298c..fb5a831fe8b 100644 --- 
a/golang/cosmos/x/vstorage/keeper/keeper.go +++ b/golang/cosmos/x/vstorage/keeper/keeper.go @@ -181,6 +181,54 @@ func (k Keeper) ImportStorage(ctx sdk.Context, entries []*types.DataEntry) { } } +func getEncodedKeysWithPrefixFromIterator(iterator sdk.Iterator, prefix string) [][]byte { + keys := make([][]byte, 0) + defer iterator.Close() + for ; iterator.Valid(); iterator.Next() { + key := iterator.Key() + path := types.EncodedKeyToPath(key) + if strings.HasPrefix(path, prefix) { + keys = append(keys, key) + } + } + return keys +} + +// RemoveEntriesWithPrefix removes all storage entries starting with the +// supplied pathPrefix, which may not be empty. +// It has the same effect as listing children of the prefix and removing each +// descendant recursively. +func (k Keeper) RemoveEntriesWithPrefix(ctx sdk.Context, pathPrefix string) { + store := ctx.KVStore(k.storeKey) + + if len(pathPrefix) == 0 { + panic("cannot remove all content") + } + if err := types.ValidatePath(pathPrefix); err != nil { + panic(err) + } + descendantPrefix := pathPrefix + types.PathSeparator + + // since vstorage encodes keys with a prefix indicating the number of path + // elements, we cannot use a simple prefix iterator. + // Instead we iterate over the whole vstorage content and check + // whether each entry matches the descendantPrefix. This choice assumes most + // entries will be deleted. An alternative implementation would be to + // recursively list all children under the descendantPrefix, and delete them. + + iterator := sdk.KVStorePrefixIterator(store, nil) + + keys := getEncodedKeysWithPrefixFromIterator(iterator, descendantPrefix) + + for _, key := range keys { + store.Delete(key) + } + + // Update the prefix entry itself with SetStorage, which will effectively + // delete it and all necessary ancestors. 
+ k.SetStorage(ctx, agoric.NewKVEntryWithNoValue(pathPrefix)) +} + func (k Keeper) EmitChange(ctx sdk.Context, change *ProposedChange) { if change.NewValue == change.ValueFromLastBlock { // No change. diff --git a/golang/cosmos/x/vstorage/keeper/keeper_test.go b/golang/cosmos/x/vstorage/keeper/keeper_test.go index 120e707b64b..38fcdb7e7f6 100644 --- a/golang/cosmos/x/vstorage/keeper/keeper_test.go +++ b/golang/cosmos/x/vstorage/keeper/keeper_test.go @@ -185,7 +185,25 @@ func TestStorage(t *testing.T) { t.Errorf("got export %q, want %q", got, expectedKey2Export) } + keeper.RemoveEntriesWithPrefix(ctx, "key2.child2") + if keeper.HasEntry(ctx, "key2") { + t.Errorf("got leftover entries for key2 after removal") + } + expectedRemainingExport := []*types.DataEntry{ + {Path: "alpha2", Value: "value2"}, + {Path: "beta3", Value: "value3"}, + {Path: "inited", Value: ""}, + } + gotRemainingExport := keeper.ExportStorage(ctx) + if !reflect.DeepEqual(gotRemainingExport, expectedRemainingExport) { + t.Errorf("got remaining export %q, want %q", expectedRemainingExport, expectedRemainingExport) + } + keeper.ImportStorage(ctx, gotExport) + gotExport = keeper.ExportStorage(ctx) + if !reflect.DeepEqual(gotExport, expectedExport) { + t.Errorf("got export %q after import, want %q", gotExport, expectedExport) + } } func TestStorageNotify(t *testing.T) { From d46c8e2d270999ccba552e0072cb5e0926922f28 Mon Sep 17 00:00:00 2001 From: Brian Warner Date: Wed, 9 Aug 2023 09:02:53 -0700 Subject: [PATCH 094/109] fix(swing-store): add 'replay' artifactMode, make export more strict Previously, `makeSwingStoreExporter()` took a positional argument named `exportMode`, with values of 'current', 'archival', or 'debug'. This controlled how many artifacts were included in the export, on a best-effort basis (e.g. a DB whose old spans were pruned would emit the same artifacts with either 'current' or 'archival'). 
`importSwingStore()` took an options bag with both the `makeSwingStore` options (like `keepSnapshots` and `keepTranscripts`), and an import-specific `includeHistorical` boolean, which controlled which artifacts were processed by the import. This was also on a best-effort basis: `includeHistorical: true` on an export dataset that lacked old spans would produce the same (pruned) DB as `false`. This commit changes both APIs to take an options bag with a common `artifactMode` option, with values of `operational`, `replay`, `archival`, or `debug`. The `operational` choice replaces `current` and behaves the same way: just enough data for normal operations. The new `replay` choice falls between 'operational' and 'archival', and selects all transcript spans for the current incarnation of each vat, but omits transcript spans for old incarnations: enough to perform a full vat-replay of the latest incarnation. Note: `makeSwingStoreExporter` was changed from a positional argument to an options bag, and no attempt was made to be compatible with old-style callers. During export, the mode is now strict: if the DB lacks the artifacts requested by the given mode, `makeSwingStoreExporter()` will throw an error, rather than emit fewer artifacts than desired. This means `artifactMode: 'replay'` will fail unless the DB being exported has all those old (current-incarnation) transcript items. And `archival` will fail unless the DB has the old incarnation spans too. The `debug` mode is best-effort, and emits everything available without the additional completeness checks. During import, the mode applies both an import filter and a completeness check. So exporting with `archival` but importing with `operational` will get you a pruned DB, lacking anything historical. Exporting with `operational` and importing with `replay` or `archival` will fail, because the newly-populated DB does not contain any historical artifacts.
closes #8105 --- packages/swing-store/docs/data-export.md | 31 +-- packages/swing-store/src/assertComplete.js | 11 +- packages/swing-store/src/bundleStore.js | 7 +- packages/swing-store/src/exporter.js | 30 +-- packages/swing-store/src/importer.js | 30 +-- packages/swing-store/src/internal.js | 12 +- packages/swing-store/src/repairMetadata.js | 4 +- packages/swing-store/src/snapStore.js | 21 +- packages/swing-store/src/transcriptStore.js | 179 ++++++++++++------ packages/swing-store/src/types.d.ts | 19 +- packages/swing-store/test/test-export.js | 121 +++++++----- .../swing-store/test/test-exportImport.js | 73 +++++-- packages/swing-store/test/test-import.js | 123 +++++++----- .../swing-store/test/test-repair-metadata.js | 20 +- 14 files changed, 441 insertions(+), 240 deletions(-) diff --git a/packages/swing-store/docs/data-export.md b/packages/swing-store/docs/data-export.md index 2eae92bec7b..ecdecda6267 100644 --- a/packages/swing-store/docs/data-export.md +++ b/packages/swing-store/docs/data-export.md @@ -179,22 +179,31 @@ As a result, for each active vat, the first-stage Export Data contains a record The `openSwingStore()` function has an option named `keepTranscripts` (which defaults to `true`), which causes the transcriptStore to retain the old transcript items. A second option named `keepSnapshots` (which defaults to `false`) causes the snapStore to retain the old heap snapshots. Opening the swingStore with a `false` option does not necessarily delete the old items immediately, but they'll probably get deleted the next time the kernel triggers a heap snapshot or transcript-span rollover. Validators who care about minimizing their disk usage will want to set both to `false`. In the future, we will arrange the SwingStore SQLite tables to provide easy `sqlite3` CLI commands that will delete the old data, so validators can also periodically use the CLI command to prune it. 
-When exporting, the `makeSwingStoreExporter()` function takes an `exportMode=` argument. This serves to limit the set of artifacts that will be provided in the export. The defined values of `exportMode` are: -* `current`: include only the current transcript span and current snapshot for each vat: just the minimum set necessary for current operations -* `archival`: include all available transcript spans -* `debug`: include all available transcript spans *and* all available snapshots. The old snapshots are never necessary for normal operations, nor are they likely to be usefor for extreme upgrade scenarios, but they might be useful for some unusual debugging operation +When exporting, the `makeSwingStoreExporter()` function takes an `artifactMode` option (in an options bag). This serves to both limit, and provide some minimal guarantees about, the set of artifacts that will be provided in the export. The defined values of `artifactMode` each build upon the previous one: -Note that `exportMode` does not affect the Export Data generated by the exporter (if we *ever* want to validate this optional data, the hashes are mandatory). It only affects the names returned by `getArtifactNames()`: the list will be smaller for `current` than for `archival`. Re-exporting from a pruned copy will lack the old data, even if the re-export uses `archival`, because the second SwingStore cannot magically reconstruct the missing data. +* `operational`: include only the current transcript span and current snapshot for each vat: just the minimum set necessary for current operations +* `replay`: add all transcript spans for the current incarnation +* `archival`: add all available transcript spans, even for old incarnations +* `debug`: add all available snapshots, giving you everything. 
The old snapshots are never necessary for normal operations, nor are they likely to be useful for extreme upgrade scenarios, but they might be useful for some unusual debugging operations or investigations -Note that when a vat is terminated, we delete all information about it, including transcript items and snapshots, both current and old. This will remove all the Export Data records, and well as the matching artifacts from `getArtifactNames`. +For each mode, the export will fail if the data necessary for those artifacts is not available (e.g. it was previously pruned). For example, an export with `artifactMode: 'replay'` will fail unless every vat has all transcript entries for each one's current incarnation. The `archival` mode will fail to export unless every vat has *every* transcript entry, back to the very first incarnation. -When importing, the `importSwingStore()` function takes an options bag, which has property named `includeHistorical`. This property defaults to `false`, which makes the importer ignore any historical artifacts present in the export dataset. To import the historical transcript spans (and snapshots), you must set it to `true`. +However the `debug` export mode will never fail: it merely dumps everything in the swingstore, without limits or completeness checks. -So, to convey historical transcript spans from one swingstore to another, you must set three options along the way: +Note that `artifactMode` does not affect the Export Data generated by the exporter (because if we *ever* want to validate this optional data, the hashes are mandatory). It only affects the names returned by `getArtifactNames()`: `operational` returns a subset of `replay`, which returns a subset of `archival`. And re-exporting from a previously-pruned copy under `archival` mode will fail, because the second SwingStore cannot magically reconstruct the missing data. 
-* the original swingstore must be opened with `{ includeHistorical: true }`, otherwise the old spans will be pruned immediately -* the export must use `makeSwingStoreExporter(dirpath, 'archival')`, otherwise the export will omit the old spans -* the import must use `importSwingStore(exporter, dirPath, { includeHistorical: true })`, otherwide teh import will ignore the old spans +Also note that when a vat is terminated, we delete all information about it, including transcript items and snapshots, both current and old. This will remove all the Export Data records, and well as the matching artifacts from `getArtifactNames`. + +When importing, the `importSwingStore()` function's options bag takes a property named `artifactMode`, with the same meanings as for export. Importing with the `operational` mode will ignore any artifacts other than those needed for current operations, and will fail unless all such artifacts were available. Importing with `replay` will ignore spans from old incarnations, but will fail unless all spans from current incarnations are present. Importing with `archival` will fail unless all spans from all incarnations are present. There is no `debug` option during import. + +`importSwingStore()` returns a swingstore, which means its options bag also contains the same options as `openSwingStore()`, including the `keepTranscripts` option. This defaults to `true`, but if it were overridden to `false`, then the new swingstore will delete transcript spans as soon as they are no longer needed for operational purposes (e.g. when `transcriptStore.rolloverSpan()` is called). 
+ +So, to avoid pruning current-incarnation historical transcript spans when exporting from one swingstore to another, you must set (or avoid overriding) the following options along the way: + +* the original swingstore must not be opened with `{ keepTranscripts: false }`, otherwise the old spans will be pruned immediately +* the export must use `makeSwingStoreExporter(dirpath, { artifactMode: 'replay'})`, otherwise the export will omit the old spans +* the import must use `importSwingStore(exporter, dirPath, { artifactMode: 'replay'})`, otherwise the import will ignore the old spans + * the `importSwingStore` call (and all subsequent `openSwingStore` calls) must not use `keepTranscripts: false`, otherwise the new swingstore will prune historical spans as new ones are created (during `rolloverSpan`). ## Implementation Details diff --git a/packages/swing-store/src/assertComplete.js b/packages/swing-store/src/assertComplete.js index 62b09465ea6..73a8d0f7292 100644 --- a/packages/swing-store/src/assertComplete.js +++ b/packages/swing-store/src/assertComplete.js @@ -1,20 +1,19 @@ /** * @param {import('./internal.js').SwingStoreInternal} internal - * @param {'operational'} level + * @param {Omit} checkMode * @returns {void} */ -export function assertComplete(internal, level) { - assert.equal(level, 'operational'); // only option for now +export function assertComplete(internal, checkMode) { // every bundle must be populated - internal.bundleStore.assertComplete(level); + internal.bundleStore.assertComplete(checkMode); // every 'isCurrent' transcript span must have all items // TODO: every vat with any data must have a isCurrent transcript // span - internal.transcriptStore.assertComplete(level); + internal.transcriptStore.assertComplete(checkMode); // every 'inUse' snapshot must be populated - internal.snapStore.assertComplete(level); + internal.snapStore.assertComplete(checkMode); // TODO: every isCurrent span that starts with load-snapshot has a // matching snapshot 
(counter-argument: swing-store should not know diff --git a/packages/swing-store/src/bundleStore.js b/packages/swing-store/src/bundleStore.js index 4b2de774ac3..2a6f0f20fd0 100644 --- a/packages/swing-store/src/bundleStore.js +++ b/packages/swing-store/src/bundleStore.js @@ -16,6 +16,7 @@ import { createSHA256 } from './hasher.js'; */ /** * @typedef { import('./exporter').SwingStoreExporter } SwingStoreExporter + * @typedef { import('./internal.js').ArtifactMode } ArtifactMode * * @typedef {{ * addBundle: (bundleID: string, bundle: Bundle) => void; @@ -29,7 +30,7 @@ import { createSHA256 } from './hasher.js'; * repairBundleRecord: (key: string, value: string) => void, * importBundleRecord: (key: string, value: string) => void, * importBundle: (name: string, dataProvider: () => Promise) => Promise, - * assertComplete: (level: 'operational') => void, + * assertComplete: (checkMode: Omit) => void, * getExportRecords: () => IterableIterator, * getArtifactNames: () => AsyncIterableIterator, * getBundleIDs: () => IterableIterator, @@ -162,8 +163,8 @@ export function makeBundleStore(db, ensureTxn, noteExport = () => {}) { return sqlGetPrunedBundles.all(); } - function assertComplete(level) { - assert.equal(level, 'operational'); // for now + function assertComplete(checkMode) { + assert(checkMode !== 'debug', checkMode); const pruned = getPrunedBundles(); if (pruned.length) { throw Fail`missing bundles for: ${pruned.join(',')}`; diff --git a/packages/swing-store/src/exporter.js b/packages/swing-store/src/exporter.js index 47489454547..777340c4d0f 100644 --- a/packages/swing-store/src/exporter.js +++ b/packages/swing-store/src/exporter.js @@ -8,6 +8,8 @@ import { makeBundleStore } from './bundleStore.js'; import { makeSnapStore } from './snapStore.js'; import { makeSnapStoreIO } from './snapStoreIO.js'; import { makeTranscriptStore } from './transcriptStore.js'; +import { assertComplete } from './assertComplete.js'; +import { validateArtifactMode } from './internal.js'; 
/** * @template T @@ -53,7 +55,7 @@ import { makeTranscriptStore } from './transcriptStore.js'; * * Get a list of name of artifacts available from the swingStore. A name * returned by this method guarantees that a call to `getArtifact` on the same - * exporter instance will succeed. The `exportMode` option to + * exporter instance will succeed. The `artifactMode` option to * `makeSwingStoreExporter` controls the filtering of the artifact names * yielded. * @@ -75,22 +77,20 @@ import { makeTranscriptStore } from './transcriptStore.js'; */ /** - * @typedef {'current' | 'archival' | 'debug'} ExportMode + * @typedef { object } ExportSwingStoreOptions + * @property { import('./internal.js').ArtifactMode } [artifactMode] What artifacts should/must the exporter provide? */ /** * @param {string} dirPath - * @param { ExportMode } exportMode + * @param { ExportSwingStoreOptions } [options] * @returns {SwingStoreExporter} */ -export function makeSwingStoreExporter(dirPath, exportMode = 'current') { +export function makeSwingStoreExporter(dirPath, options = {}) { typeof dirPath === 'string' || Fail`dirPath must be a string`; - exportMode === 'current' || - exportMode === 'archival' || - exportMode === 'debug' || - Fail`invalid exportMode ${q(exportMode)}`; - const exportHistoricalSnapshots = exportMode === 'debug'; - const exportHistoricalTranscripts = exportMode !== 'current'; + const { artifactMode = 'operational' } = options; + validateArtifactMode(artifactMode); + const filePath = dbFileInDirectory(dirPath); const db = sqlite3(filePath); @@ -106,6 +106,12 @@ export function makeSwingStoreExporter(dirPath, exportMode = 'current') { const bundleStore = makeBundleStore(db, ensureTxn); const transcriptStore = makeTranscriptStore(db, ensureTxn, () => {}); + if (artifactMode !== 'debug') { + // throw early if this DB will not be able to create all the desired artifacts + const internal = { snapStore, bundleStore, transcriptStore }; + assertComplete(internal, artifactMode); + } + 
const sqlGetAllKVData = db.prepare(` SELECT key, value FROM kvStore @@ -132,8 +138,8 @@ export function makeSwingStoreExporter(dirPath, exportMode = 'current') { * @yields {string} */ async function* getArtifactNames() { - yield* snapStore.getArtifactNames(exportHistoricalSnapshots); - yield* transcriptStore.getArtifactNames(exportHistoricalTranscripts); + yield* snapStore.getArtifactNames(artifactMode); + yield* transcriptStore.getArtifactNames(artifactMode); yield* bundleStore.getArtifactNames(); } diff --git a/packages/swing-store/src/importer.js b/packages/swing-store/src/importer.js index 49cb854058a..a6cedb25e5d 100644 --- a/packages/swing-store/src/importer.js +++ b/packages/swing-store/src/importer.js @@ -2,11 +2,12 @@ import { Fail, q } from '@agoric/assert'; import { makeSwingStore } from './swingStore.js'; import { buffer } from './util.js'; +import { validateArtifactMode } from './internal.js'; import { assertComplete } from './assertComplete.js'; /** * @typedef { object } ImportSwingStoreOptions - * @property { boolean } [includeHistorical] Should the importer pay attention to historical artifacts? + * @property { import('./internal.js').ArtifactMode } [artifactMode] What artifacts should the importer use and require? 
*/ /** @@ -16,15 +17,17 @@ import { assertComplete } from './assertComplete.js'; * * @param {import('./exporter').SwingStoreExporter} exporter * @param {string | null} [dirPath] - * @param {ImportSwingStoreOptions} options + * @param {ImportSwingStoreOptions} [options] * @returns {Promise} */ export async function importSwingStore(exporter, dirPath = null, options = {}) { if (dirPath && typeof dirPath !== 'string') { Fail`dirPath must be a string`; } - const { includeHistorical = false } = options; - const store = makeSwingStore(dirPath, true, options); + const { artifactMode = 'operational', ...makeSwingStoreOptions } = options; + validateArtifactMode(artifactMode); + + const store = makeSwingStore(dirPath, true, makeSwingStoreOptions); const { kernelStorage, internal } = store; // For every exportData entry, we add a DB record. 'kv' entries are @@ -81,11 +84,10 @@ export async function importSwingStore(exporter, dirPath = null, options = {}) { // All the metadata is now installed, and we're prepared for // artifacts. We walk `getArtifactNames()` and offer each one to the - // submodule, which ignores historical ones (unless - // 'includeHistorical' is true), and validates+accepts the - // rest. This is an initial import, so we don't need to check if we - // already have the data, but the submodule function is free to do - // that check if they want. + // submodule, which may ignore it according to `artifactMode`, but + // otherwise validates and accepts it. This is an initial import, so + // we don't need to check if we already have the data, but the + // submodule function is free to do such checks. 
for await (const name of exporter.getArtifactNames()) { const makeChunkIterator = () => exporter.getArtifact(name); @@ -98,13 +100,13 @@ export async function importSwingStore(exporter, dirPath = null, options = {}) { await internal.bundleStore.importBundle(name, dataProvider); } else if (tag === 'snapshot') { await internal.snapStore.populateSnapshot(name, makeChunkIterator, { - includeHistorical, + artifactMode, }); } else if (tag === 'transcript') { await internal.transcriptStore.populateTranscriptSpan( name, makeChunkIterator, - { includeHistorical }, + { artifactMode }, ); } else { Fail`unknown artifact type ${q(tag)} on import`; @@ -112,9 +114,11 @@ export async function importSwingStore(exporter, dirPath = null, options = {}) { } // We've installed all the artifacts that we could, now do a - // completeness check. + // completeness check. Enforce at least 'operational' completeness, + // even if the given mode was 'debug'. - assertComplete(internal, 'operational'); + const checkMode = artifactMode === 'debug' ? 'operational' : artifactMode; + assertComplete(internal, checkMode); await exporter.close(); return store; diff --git a/packages/swing-store/src/internal.js b/packages/swing-store/src/internal.js index e73a3d11732..18ece829261 100644 --- a/packages/swing-store/src/internal.js +++ b/packages/swing-store/src/internal.js @@ -1,3 +1,5 @@ +import { Fail, q } from '@agoric/assert'; + /** * @typedef { import('./snapStore').SnapStoreInternal } SnapStoreInternal * @typedef { import('./transcriptStore').TranscriptStoreInternal } TranscriptStoreInternal @@ -8,7 +10,13 @@ * snapStore: SnapStoreInternal, * bundleStore: BundleStoreInternal, * }} SwingStoreInternal + * + * @typedef {'operational' | 'replay' | 'archival' | 'debug'} ArtifactMode */ -// Ensure this is a module. 
-export {}; +export const artifactModes = ['operational', 'replay', 'archival', 'debug']; +export function validateArtifactMode(artifactMode) { + if (!artifactModes.includes(artifactMode)) { + Fail`invalid artifactMode ${q(artifactMode)}`; + } +} diff --git a/packages/swing-store/src/repairMetadata.js b/packages/swing-store/src/repairMetadata.js index dc82b0c2f2f..8ebc53fc4d7 100644 --- a/packages/swing-store/src/repairMetadata.js +++ b/packages/swing-store/src/repairMetadata.js @@ -60,6 +60,8 @@ export async function doRepairMetadata(internal, exporter) { } // and do a completeness check - assertComplete(internal, 'operational'); + /** @type { import('./internal.js').ArtifactMode } */ + const artifactMode = 'operational'; + assertComplete(internal, artifactMode); await exporter.close(); } diff --git a/packages/swing-store/src/snapStore.js b/packages/swing-store/src/snapStore.js index 0be13ee14c0..a201d3a5405 100644 --- a/packages/swing-store/src/snapStore.js +++ b/packages/swing-store/src/snapStore.js @@ -31,6 +31,7 @@ import { buffer } from './util.js'; /** * @typedef { import('./exporter').SwingStoreExporter } SwingStoreExporter + * @typedef { import('./internal.js').ArtifactMode } ArtifactMode * * @typedef {{ * loadSnapshot: (vatID: string) => AsyncIterableIterator, @@ -44,10 +45,10 @@ import { buffer } from './util.js'; * @typedef {{ * exportSnapshot: (name: string) => AsyncIterableIterator, * getExportRecords: (includeHistorical: boolean) => IterableIterator, - * getArtifactNames: (includeHistorical: boolean) => AsyncIterableIterator, + * getArtifactNames: (artifactMode: ArtifactMode) => AsyncIterableIterator, * importSnapshotRecord: (key: string, value: string) => void, - * populateSnapshot: (name: string, makeChunkIterator: () => AnyIterableIterator, options: { includeHistorical: boolean }) => Promise, - * assertComplete: (level: 'operational') => void, + * populateSnapshot: (name: string, makeChunkIterator: () => AnyIterableIterator, options: { 
artifactMode: ArtifactMode }) => Promise, + * assertComplete: (checkMode: Omit) => void, * repairSnapshotRecord: (key: string, value: string) => void, * }} SnapStoreInternal * @@ -481,11 +482,11 @@ export function makeSnapStore( } } - async function* getArtifactNames(includeHistorical) { + async function* getArtifactNames(artifactMode) { for (const rec of sqlGetAvailableSnapshots.iterate(1)) { yield snapshotArtifactName(rec); } - if (includeHistorical) { + if (artifactMode === 'debug') { for (const rec of sqlGetAvailableSnapshots.iterate(null)) { yield snapshotArtifactName(rec); } @@ -564,12 +565,12 @@ export function makeSnapStore( * @param {string} name Artifact name of the snapshot * @param {() => AnyIterableIterator} makeChunkIterator get an iterator of snapshot byte chunks * @param {object} options - * @param {boolean} options.includeHistorical + * @param {ArtifactMode} options.artifactMode * @returns {Promise} */ async function populateSnapshot(name, makeChunkIterator, options) { ensureTxn(); - const { includeHistorical } = options; + const { artifactMode } = options; const parts = name.split('.'); const [type, vatID, rawEndPos] = parts; // prettier-ignore @@ -580,7 +581,7 @@ export function makeSnapStore( sqlGetSnapshotHashFor.get(vatID, snapPos) || Fail`no metadata for snapshot ${name}`; - if (!metadata.inUse && !includeHistorical) { + if (!metadata.inUse && artifactMode !== 'debug') { return; // ignore old snapshots } @@ -617,8 +618,8 @@ export function makeSnapStore( `); sqlListPrunedCurrentSnapshots.pluck(); - function assertComplete(level) { - assert.equal(level, 'operational'); // for now + function assertComplete(checkMode) { + assert(checkMode !== 'debug', checkMode); // every 'inUse' snapshot must be populated const vatIDs = sqlListPrunedCurrentSnapshots.all(); if (vatIDs.length) { diff --git a/packages/swing-store/src/transcriptStore.js b/packages/swing-store/src/transcriptStore.js index 3ff700d0c0f..ee30af5d4d6 100644 --- 
a/packages/swing-store/src/transcriptStore.js +++ b/packages/swing-store/src/transcriptStore.js @@ -11,6 +11,8 @@ import { createSHA256 } from './hasher.js'; */ /** + * @typedef { import('./internal.js').ArtifactMode } ArtifactMode + * * @typedef {{ * initTranscript: (vatID: string) => void, * rolloverSpan: (vatID: string) => number, @@ -24,10 +26,10 @@ import { createSHA256 } from './hasher.js'; * @typedef {{ * exportSpan: (name: string) => AsyncIterableIterator * getExportRecords: (includeHistorical: boolean) => IterableIterator, - * getArtifactNames: (includeHistorical: boolean) => AsyncIterableIterator, + * getArtifactNames: (artifactMode: ArtifactMode) => AsyncIterableIterator, * importTranscriptSpanRecord: (key: string, value: string) => void, - * populateTranscriptSpan: (name: string, makeChunkIterator: () => AnyIterableIterator, options: { includeHistorical: boolean }) => Promise, - * assertComplete: (level: 'operational') => void, + * populateTranscriptSpan: (name: string, makeChunkIterator: () => AnyIterableIterator, options: { artifactMode: ArtifactMode }) => Promise, + * assertComplete: (checkMode: Omit) => void, * repairTranscriptSpanRecord: (key: string, value: string) => void, * readFullVatTranscript: (vatID: string) => Iterable<{position: number, item: string}> * }} TranscriptStoreInternal @@ -331,6 +333,13 @@ export function makeTranscriptStore( ORDER BY vatID, startPos `); + const sqlGetIncarnationSpanMetadata = db.prepare(` + SELECT vatID, startPos, endPos, hash, isCurrent, incarnation + FROM transcriptSpans + WHERE vatID=? AND incarnation=? 
+ ORDER BY vatID, startPos + `); + const sqlGetCurrentSpanMetadata = db.prepare(` SELECT vatID, startPos, endPos, hash, isCurrent, incarnation FROM transcriptSpans @@ -338,6 +347,11 @@ export function makeTranscriptStore( ORDER BY vatID, startPos `); + function dbRecToExportRec(dbRec) { + const { vatID, startPos, endPos, hash, isCurrent, incarnation } = dbRec; + return spanRec(vatID, startPos, endPos, hash, isCurrent, incarnation); + } + /** * Obtain artifact metadata records for spans contained in this store. * @@ -360,63 +374,77 @@ export function makeTranscriptStore( * replay will never be required or because such replay would be prohibitively * expensive regardless of need and therefor other repair strategies employed. * + * The only code path which could use 'false' would be `swingstore.dump()`, + * which takes the same flag. + * * @yields {readonly [key: string, value: string]} * @returns {IterableIterator} * An iterator over pairs of [spanMetadataKey, rec], where `rec` is a * JSON-encoded metadata record for the span named by `spanMetadataKey`. */ function* getExportRecords(includeHistorical = true) { - const sql = includeHistorical - ? 
sqlGetAllSpanMetadata - : sqlGetCurrentSpanMetadata; - for (const rec of sql.iterate()) { - const { vatID, startPos, endPos, hash, isCurrent, incarnation } = rec; - const exportRec = spanRec( - vatID, - startPos, - endPos, - hash, - isCurrent, - incarnation, - ); - yield [spanMetadataKey(rec), JSON.stringify(exportRec)]; + if (includeHistorical) { + for (const rec of sqlGetAllSpanMetadata.iterate()) { + yield [spanMetadataKey(rec), JSON.stringify(dbRecToExportRec(rec))]; + } + } else { + for (const rec of sqlGetCurrentSpanMetadata.iterate()) { + yield [spanMetadataKey(rec), JSON.stringify(dbRecToExportRec(rec))]; + } } } - // 'position' is not recycled across incarnations, so strictly - // speaking this query doesn't need to filter on 'incarnation = ?', - // but this will catch problems like items with incorrect or missing - // incarnation values - - const sqlCountPopulatedSpanItems = db.prepare(` + const sqlCountSpanItems = db.prepare(` SELECT COUNT(*) FROM transcriptItems - WHERE vatID = ? AND incarnation = ? AND position >= ? AND position < ? + WHERE vatID = ? AND position >= ? AND position < ? `); - sqlCountPopulatedSpanItems.pluck(); + sqlCountSpanItems.pluck(); /** * Obtain artifact names for spans contained in this store. * - * @param {boolean} includeHistorical If true, include all spans that are - * present in the store regardless of their currency; if false, only include - * the current span for each vat. - * + * @param {ArtifactMode} artifactMode Control which artifacts should be exported. + * At 'operational', only include current spans. At 'replay', + * include all spans of the current incarnation for each vat. At + * 'archival' and 'debug', include all spans. * @yields {string} * @returns {AsyncIterableIterator} An iterator over the names of all the artifacts requested */ - async function* getArtifactNames(includeHistorical) { - const sql = includeHistorical - ? 
sqlGetAllSpanMetadata - : sqlGetCurrentSpanMetadata; - for (const rec of sql.iterate()) { - const { vatID, incarnation, startPos, endPos } = rec; - if ( - !sqlCountPopulatedSpanItems.get(vatID, incarnation, startPos, endPos) - ) { - continue; + async function* getArtifactNames(artifactMode) { + // for all non-'debug' modes, the exporter asserts that all + // requested items are present (i.e. the artifacts will be + // complete), so we don't need to check that ourselves + if (artifactMode === 'operational') { + for (const rec of sqlGetCurrentSpanMetadata.iterate()) { + yield spanArtifactName(rec); + } + } else if (artifactMode === 'replay') { + for (const curRec of sqlGetCurrentSpanMetadata.iterate()) { + const { vatID, incarnation } = curRec; + for (const rec of sqlGetIncarnationSpanMetadata.iterate( + vatID, + incarnation, + )) { + yield spanArtifactName(rec); + } + } + } else if (artifactMode === 'archival') { + // everything + for (const rec of sqlGetAllSpanMetadata.iterate()) { + yield spanArtifactName(rec); + } + } else if (artifactMode === 'debug') { + // everything that is a complete span + for (const rec of sqlGetAllSpanMetadata.iterate()) { + const { vatID, startPos, endPos } = rec; + const count = sqlCountSpanItems.get(vatID, startPos, endPos); + if (count !== endPos - startPos) { + // skip incomplete spans, because the exporter did not + // already do a completeness check in 'debug' mode + continue; + } + yield spanArtifactName(rec); } - - yield spanArtifactName(rec); } } @@ -590,19 +618,27 @@ export function makeTranscriptStore( WHERE vatID = ? AND startPos = ? `); + const sqlGetStartOfIncarnation = db.prepare(` + SELECT startPos + FROM transcriptSpans + WHERE vatID=? AND incarnation=? + ORDER BY startPos ASC LIMIT 1 + `); + sqlGetStartOfIncarnation.pluck(); + /** * Import a transcript span from another store. 
* * @param {string} name Artifact Name of the transcript span * @param {() => AnyIterableIterator} makeChunkIterator get an iterator of transcript byte chunks * @param {object} options - * @param {boolean} options.includeHistorical + * @param {ArtifactMode} options.artifactMode * * @returns {Promise} */ async function populateTranscriptSpan(name, makeChunkIterator, options) { ensureTxn(); - const { includeHistorical } = options; + const { artifactMode } = options; const parts = name.split('.'); const [type, vatID, rawStartPos, rawEndPos] = parts; // prettier-ignore @@ -614,11 +650,23 @@ export function makeTranscriptStore( const metadata = sqlGetSpanMetadataFor.get(vatID, startPos) || Fail`no metadata for transcript span ${name}`; - if (!metadata.isCurrent && !includeHistorical) { - return; // ignore old spans - } assert.equal(metadata.endPos, endPos); + if (artifactMode === 'operational') { + if (!metadata.isCurrent) { + return; // ignore old spans + } + } + if (artifactMode === 'replay') { + // ignore spans that aren't for the current incarnation + const { incarnation } = sqlGetCurrentSpanBounds.get(vatID); + const incStart = sqlGetStartOfIncarnation.get(vatID, incarnation); + if (startPos < incStart) { + return; + } + } + // 'archival' and 'debug' modes accept all spans + const artifactChunks = await makeChunkIterator(); const inStream = Readable.from(artifactChunks); const lineTransform = new BufferLineTransform(); @@ -679,19 +727,36 @@ export function makeTranscriptStore( } } - function assertComplete(level) { - assert.equal(level, 'operational'); // for now - // every 'isCurrent' transcript span must have all items + function assertComplete(checkMode) { + assert(checkMode !== 'debug', checkMode); for (const rec of sqlGetCurrentSpanMetadata.iterate()) { const { vatID, startPos, endPos, incarnation } = rec; - const count = sqlCountPopulatedSpanItems.get( - vatID, - incarnation, - startPos, - endPos, - ); - if (count !== endPos - startPos) { - throw 
Fail`incomplete current transcript span: ${count} items, ${rec}`; + + if (checkMode === 'operational') { + // at 'operational', every 'isCurrent' transcript span must + // have all items + const count = sqlCountSpanItems.get(vatID, startPos, endPos); + if (count !== endPos - startPos) { + throw Fail`incomplete current transcript span: ${count} items, ${rec}`; + } + } else if (checkMode === 'replay') { + // at 'replay', every vat's current incarnation must be fully + // populated (which implies 'operational') + const incStart = sqlGetStartOfIncarnation.get(vatID, incarnation); + const incCount = sqlCountSpanItems.get(vatID, incStart, endPos); + if (incCount !== endPos - incStart) { + throw Fail`incomplete current incarnation transcript: ${incCount} items`; + } + } else if (checkMode === 'archival') { + // at 'archival', every incarnation must be fully populated, + // which means position=0 up through endPos-1 (which implies + // 'replay') + const arcCount = sqlCountSpanItems.get(vatID, 0, endPos); + if (arcCount !== endPos) { + throw Fail`incomplete archival transcript: ${arcCount} vs ${endPos}`; + } + } else { + throw Fail`unknown checkMode ${checkMode}`; } } } diff --git a/packages/swing-store/src/types.d.ts b/packages/swing-store/src/types.d.ts index b1697d3d323..e8f9d7d84fd 100644 --- a/packages/swing-store/src/types.d.ts +++ b/packages/swing-store/src/types.d.ts @@ -2,13 +2,14 @@ export type { SwingStore, SwingStoreKernelStorage, SwingStoreHostStorage, -} from './src/swingStore.js'; -export type { KVStore } from './src/kvStore.js'; -export type { BundleStore } from './src/bundleStore.js'; +} from './swingStore.js'; +export type { KVStore } from './kvStore.js'; +export type { BundleStore } from './bundleStore.js'; +export type { SnapStore, SnapshotResult, SnapshotInfo } from './snapStore.js'; +export type { TranscriptStore } from './transcriptStore.js'; +export type { ArtifactMode } from './internal.js'; +export type { ImportSwingStoreOptions } from 
'./importer.js'; export type { - SnapStore, - SnapshotResult, - SnapshotInfo, -} from './src/snapStore.js'; -export type { TranscriptStore } from './src/transcriptStore.js'; -export type { SwingStoreExporter, ExportMode } from './src/exporter.js'; + SwingStoreExporter, + ExportSwingStoreOptions, +} from './exporter.js'; diff --git a/packages/swing-store/test/test-export.js b/packages/swing-store/test/test-export.js index 611a0c0d497..fae8e295c00 100644 --- a/packages/swing-store/test/test-export.js +++ b/packages/swing-store/test/test-export.js @@ -7,6 +7,14 @@ import { initSwingStore, makeSwingStoreExporter } from '../src/index.js'; import { tmpDir, getSnapshotStream, makeB0ID } from './util.js'; +const rank = { + operational: 1, + replay: 2, + archival: 3, + debug: 4, + 'debug-on-pruned': 4, +}; + const snapshotData = 'snapshot data'; // this snapHash was computed manually const snapHash = @@ -28,48 +36,57 @@ const exportTest = test.macro(async (t, mode) => { const ss1 = initSwingStore(dbDir, options); const ks = ss1.kernelStorage; - // build a DB with three spans (only one inUse) and two snapshots - // (same) + // build a DB with four spans (one in an old incarnation, two + // historical but current incarnation, only one inUse) and two + // snapshots (only one inUSe) ks.kvStore.set('key1', 'value1'); ks.bundleStore.addBundle(bundle0ID, bundle0); ks.transcriptStore.initTranscript('v1'); + // incarnation 0 ks.transcriptStore.addItem('v1', 'start-worker'); // 0 - ks.transcriptStore.addItem('v1', 'delivery1'); // 1 - await ks.snapStore.saveSnapshot('v1', 2, getSnapshotStream(snapshotData)); - ks.transcriptStore.addItem('v1', 'save-snapshot'); // 2 - ks.transcriptStore.rolloverSpan('v1'); // range= 0..3 + ks.transcriptStore.addItem('v1', 'shutdown-worker'); // 1 + ks.transcriptStore.rolloverIncarnation('v1'); + const spanHash0 = + '5bee0f44eca02f23eab03703e84ed2647d5d117fed99e1c30a3b424b7f082ab9'; + + // incarnation 1 + ks.transcriptStore.addItem('v1', 
'start-worker'); // 2 + ks.transcriptStore.addItem('v1', 'delivery1'); // 3 + await ks.snapStore.saveSnapshot('v1', 4, getSnapshotStream(snapshotData)); + ks.transcriptStore.addItem('v1', 'save-snapshot'); // 4 + ks.transcriptStore.rolloverSpan('v1'); // range= 2..5 const spanHash1 = '57152efdd7fdf75c03371d2b4f1088d5bf3eae7fe643babce527ff81df38998c'; - ks.transcriptStore.addItem('v1', 'load-snapshot'); // 3 - ks.transcriptStore.addItem('v1', 'delivery2'); // 4 - await ks.snapStore.saveSnapshot('v1', 5, getSnapshotStream(snapshotData)); - ks.transcriptStore.addItem('v1', 'save-snapshot'); // 5 - ks.transcriptStore.rolloverSpan('v1'); // range= 3..6 + ks.transcriptStore.addItem('v1', 'load-snapshot'); // 5 + ks.transcriptStore.addItem('v1', 'delivery2'); // 6 + await ks.snapStore.saveSnapshot('v1', 7, getSnapshotStream(snapshotData)); + ks.transcriptStore.addItem('v1', 'save-snapshot'); // 7 + ks.transcriptStore.rolloverSpan('v1'); // range= 5..8 const spanHash2 = '1947001e78e01bd1e773feb22b4ffc530447373b9de9274d5d5fbda3f23dbf2b'; - ks.transcriptStore.addItem('v1', 'load-snapshot'); // 6 - ks.transcriptStore.addItem('v1', 'delivery3'); // 7 + ks.transcriptStore.addItem('v1', 'load-snapshot'); // 8 + ks.transcriptStore.addItem('v1', 'delivery3'); // 9 const spanHash3 = 'e6b42c6a3fb94285a93162f25a9fc0145fd4c5bb144917dc572c50ae2d02ee69'; - // current range= 6..8 + // current range= 8..10 - ss1.hostStorage.commit(); + await ss1.hostStorage.commit(); // create an export, and assert that the pieces match what we - // expect. exportMode='current' means we get all metadata, no + // expect. 
artifactMode='operational' means we get all metadata, no // historical transcript spans, and no historical snapshots assert.typeof(mode, 'string'); - /** @typedef {import('../src/exporter.js').ExportMode} ExportMode */ - let exportMode = /** @type {ExportMode} */ (mode); + /** @typedef {import('../src/internal.js').ArtifactMode} ArtifactMode */ + let artifactMode = /** @type {ArtifactMode} */ (mode); if (mode === 'debug-on-pruned') { - exportMode = 'debug'; + artifactMode = 'debug'; } - const exporter = makeSwingStoreExporter(dbDir, exportMode); + const exporter = makeSwingStoreExporter(dbDir, { artifactMode }); // exportData { @@ -90,36 +107,46 @@ const exportTest = test.macro(async (t, mode) => { }; check('kv.key1', 'value1'); - check('snapshot.v1.2', { + check('snapshot.v1.4', { vatID: 'v1', - snapPos: 2, + snapPos: 4, inUse: 0, hash: snapHash, }); - check('snapshot.v1.5', { + check('snapshot.v1.7', { vatID: 'v1', - snapPos: 5, + snapPos: 7, inUse: 1, hash: snapHash, }); - check('snapshot.v1.current', 'snapshot.v1.5'); - const base = { vatID: 'v1', incarnation: 0, isCurrent: 0 }; + check('snapshot.v1.current', 'snapshot.v1.7'); + const base = { vatID: 'v1', isCurrent: 0 }; check('transcript.v1.0', { ...base, + incarnation: 0, startPos: 0, - endPos: 3, + endPos: 2, + hash: spanHash0, + }); + check('transcript.v1.2', { + ...base, + incarnation: 1, + startPos: 2, + endPos: 5, hash: spanHash1, }); - check('transcript.v1.3', { + check('transcript.v1.5', { ...base, - startPos: 3, - endPos: 6, + incarnation: 1, + startPos: 5, + endPos: 8, hash: spanHash2, }); check('transcript.v1.current', { ...base, - startPos: 6, - endPos: 8, + incarnation: 1, + startPos: 8, + endPos: 10, isCurrent: 1, hash: spanHash3, }); @@ -153,29 +180,35 @@ const exportTest = test.macro(async (t, mode) => { t.deepEqual(data, expected); }; - // export mode 'current' means we omit historical snapshots and + // export mode 'operational' means we omit historical snapshots and // transcript spans - 
await check('snapshot.v1.5', 'snapshot data'); - await check('transcript.v1.6.8', 'load-snapshot\ndelivery3\n'); + await check('snapshot.v1.7', 'snapshot data'); + await check('transcript.v1.8.10', 'load-snapshot\ndelivery3\n'); await check(`bundle.${bundle0ID}`, bundle0); - if (mode === 'archival' || mode === 'debug' || mode === 'debug-on-pruned') { - // adds the old transcript spans + t.true(rank[mode] > 0); + if (rank[mode] >= rank.replay) { + // add the old transcript spans of the current incarnation await check( - 'transcript.v1.0.3', + 'transcript.v1.2.5', 'start-worker\ndelivery1\nsave-snapshot\n', ); await check( - 'transcript.v1.3.6', + 'transcript.v1.5.8', 'load-snapshot\ndelivery2\nsave-snapshot\n', ); } + if (rank[mode] >= rank.archival) { + // add the spans of the old incarnation + await check('transcript.v1.0.2', 'start-worker\nshutdown-worker\n'); + } + if (mode === 'debug') { // adds the old snapshots, which are only present if // initSwingStore() was given {keepSnapshots: true} - await check('snapshot.v1.2', 'snapshot data'); + await check('snapshot.v1.4', 'snapshot data'); // mode='debug-on-pruned' exercises the keepSnapshots:false case } @@ -187,7 +220,8 @@ const exportTest = test.macro(async (t, mode) => { } }); -test('export current', exportTest, 'current'); +test('export operational', exportTest, 'operational'); +test('export replay', exportTest, 'replay'); test('export archival', exportTest, 'archival'); test('export debug', exportTest, 'debug'); test('export debug-on-pruned', exportTest, 'debug-on-pruned'); @@ -220,9 +254,10 @@ test('export omits pruned span artifacts', async t => { const spanHash2 = 'b26c8faf425c3c2738e0c5a5e9a7cd71075c68f0c9f2d6cdfd83c68204801dbb'; - ss1.hostStorage.commit(); + await ss1.hostStorage.commit(); - const exporter = makeSwingStoreExporter(dbDir, 'archival'); + const artifactMode = 'debug'; + const exporter = makeSwingStoreExporter(dbDir, { artifactMode }); // exportData { diff --git 
a/packages/swing-store/test/test-exportImport.js b/packages/swing-store/test/test-exportImport.js index ca4ffb71e2c..20fb5d0dc7b 100644 --- a/packages/swing-store/test/test-exportImport.js +++ b/packages/swing-store/test/test-exportImport.js @@ -126,7 +126,8 @@ test('crank abort leaves no debris in export log', async t => { await ssOut.hostStorage.commit(); } - const exporter = makeSwingStoreExporter(dbDir, 'current'); + const artifactMode = 'operational'; + const exporter = makeSwingStoreExporter(dbDir, { artifactMode }); const exportData = []; for await (const elem of exporter.getExportData()) { @@ -175,12 +176,13 @@ async function testExportImport( exportMode, importMode, expectedArtifactNames, + failureMode = 'none', ) { const exportLog = makeExportLog(); const [dbDir, cleanup] = await tmpDir('testdb'); t.teardown(cleanup); - const keepTranscripts = runMode !== 'current'; + const keepTranscripts = runMode !== 'operational'; const keepSnapshots = runMode === 'debug'; const ssOut = initSwingStore(dbDir, { exportCallback: exportLog.callback, @@ -223,7 +225,15 @@ async function testExportImport( await ssOut.hostStorage.commit(); } - const exporter = makeSwingStoreExporter(dbDir, exportMode); + const incomplete = 'incomplete archival transcript: 3 vs 12'; + function doExport() { + return makeSwingStoreExporter(dbDir, { artifactMode: exportMode }); + } + if (failureMode === 'export') { + await t.throws(doExport, { message: incomplete }); + return; + } + const exporter = doExport(); const exportData = []; for await (const elem of exporter.getExportData()) { @@ -307,13 +317,36 @@ async function testExportImport( } t.deepEqual(artifactNames, expectedArtifactNames); - const includeHistorical = importMode !== 'current'; - const beforeDump = debug.dump(keepSnapshots); - const ssIn = await importSwingStore(exporter, null, { includeHistorical }); + function doImport() { + return importSwingStore(exporter, null, { artifactMode: importMode }); + } + + if (failureMode === 
'import') { + await t.throwsAsync(doImport, { message: incomplete }); + return; + } + t.is(failureMode, 'none'); + const ssIn = await doImport(); await ssIn.hostStorage.commit(); - const dumpsShouldMatch = - runMode !== 'debug' || (exportMode === 'debug' && importMode !== 'current'); + let dumpsShouldMatch = true; + if (runMode === 'operational') { + dumpsShouldMatch = true; // there's no data to lose + } else if (runMode === 'archival') { + if (exportMode === 'current') { + dumpsShouldMatch = false; // export omits some data + } + if (importMode === 'current') { + dumpsShouldMatch = false; // import ignores some data + } + } else if (runMode === 'debug') { + if (exportMode !== 'debug') { + dumpsShouldMatch = false; // export omits some data + } + if (importMode !== 'debug') { + dumpsShouldMatch = false; // import ignores some data + } + } if (dumpsShouldMatch) { const afterDump = ssIn.debug.dump(keepSnapshots); t.deepEqual(beforeDump, afterDump); @@ -348,35 +381,43 @@ const expectedDebugArtifacts = [ 'transcript.vatB.5.10', ]; -const C = 'current'; +const C = 'operational'; // nee 'current' +// we don't try to test 'replay' here: see test-import.js and test-export.js const A = 'archival'; const D = 'debug'; +// importMode='archival' requires a non-pruned DB +// (runMode!=='current'), with exportMode as 'archival' or 'debug' + +// the expected artifacts are a function of the runMode and exportMode, not importMode + test('export and import data for state sync - current->current->current', async t => { await testExportImport(t, C, C, C, expectedCurrentArtifacts); }); +// so this one fails during import test('export and import data for state sync - current->current->archival', async t => { - await testExportImport(t, C, C, A, expectedCurrentArtifacts); + await testExportImport(t, C, C, A, expectedCurrentArtifacts, 'import'); }); test('export and import data for state sync - current->current->debug', async t => { await testExportImport(t, C, C, D, 
expectedCurrentArtifacts); }); +// these all throw an error during export, because 'archival' requires a non-pruned DB test('export and import data for state sync - current->archival->current', async t => { - await testExportImport(t, C, A, C, expectedCurrentArtifacts); + await testExportImport(t, C, A, C, [], 'export'); }); test('export and import data for state sync - current->archival->archival', async t => { - await testExportImport(t, C, A, A, expectedCurrentArtifacts); + await testExportImport(t, C, A, A, [], 'export'); }); test('export and import data for state sync - current->archival->debug', async t => { - await testExportImport(t, C, A, D, expectedCurrentArtifacts); + await testExportImport(t, C, A, D, [], 'export'); }); test('export and import data for state sync - current->debug->current', async t => { await testExportImport(t, C, D, C, expectedCurrentArtifacts); }); test('export and import data for state sync - current->debug->archival', async t => { - await testExportImport(t, C, D, A, expectedCurrentArtifacts); + await testExportImport(t, C, D, A, expectedCurrentArtifacts, 'import'); }); test('export and import data for state sync - current->debug->debug', async t => { await testExportImport(t, C, D, D, expectedCurrentArtifacts); @@ -388,7 +429,7 @@ test('export and import data for state sync - archival->current->current', async await testExportImport(t, A, C, C, expectedCurrentArtifacts); }); test('export and import data for state sync - archival->current->archival', async t => { - await testExportImport(t, A, C, A, expectedCurrentArtifacts); + await testExportImport(t, A, C, A, expectedCurrentArtifacts, 'import'); }); test('export and import data for state sync - archival->current->debug', async t => { await testExportImport(t, A, C, D, expectedCurrentArtifacts); @@ -420,7 +461,7 @@ test('export and import data for state sync - debug->current->current', async t await testExportImport(t, D, C, C, expectedCurrentArtifacts); }); test('export and 
import data for state sync - debug->current->archival', async t => { - await testExportImport(t, D, C, A, expectedCurrentArtifacts); + await testExportImport(t, D, C, A, expectedCurrentArtifacts, 'import'); }); test('export and import data for state sync - debug->current->debug', async t => { await testExportImport(t, D, C, D, expectedCurrentArtifacts); diff --git a/packages/swing-store/test/test-import.js b/packages/swing-store/test/test-import.js index ccdfde23446..3711b8beb60 100644 --- a/packages/swing-store/test/test-import.js +++ b/packages/swing-store/test/test-import.js @@ -16,6 +16,13 @@ import { importSwingStore, makeSwingStoreExporter } from '../src/index.js'; import { tmpDir, makeB0ID } from './util.js'; +const rank = { + operational: 1, + replay: 2, + archival: 3, + debug: 4, +}; + const snapshotData = 'snapshot data'; // this snapHash was computed manually const snapHash = @@ -74,7 +81,7 @@ test('import empty', async t => { t.teardown(cleanup); const exporter = makeExporter(new Map(), new Map()); const ss = await importSwingStore(exporter, dbDir); - ss.hostStorage.commit(); + await ss.hostStorage.commit(); const data = convert(ss.debug.dump()); t.deepEqual(data, { kvEntries: {}, @@ -98,60 +105,66 @@ export function buildData() { exportData.set(`bundle.${bundle0ID}`, bundle0ID); const sbase = { vatID: 'v1', hash: snapHash, inUse: 0 }; - const tbase = { vatID: 'v1', startPos: 0, isCurrent: 0, incarnation: 0 }; + const tbase = { vatID: 'v1', startPos: 0, isCurrent: 0, incarnation: 1 }; const addTS = (key, obj) => exportData.set(key, JSON.stringify({ ...tbase, ...obj })); const t0hash = + '5bee0f44eca02f23eab03703e84ed2647d5d117fed99e1c30a3b424b7f082ab9'; + const t2hash = '57152efdd7fdf75c03371d2b4f1088d5bf3eae7fe643babce527ff81df38998c'; - const t3hash = + const t5hash = '1947001e78e01bd1e773feb22b4ffc530447373b9de9274d5d5fbda3f23dbf2b'; - const t6hash = + const t8hash = 'e6b42c6a3fb94285a93162f25a9fc0145fd4c5bb144917dc572c50ae2d02ee69'; - 
addTS(`transcript.v1.0`, { endPos: 3, hash: t0hash }); + addTS(`transcript.v1.0`, { incarnation: 0, endPos: 2, hash: t0hash }); + artifacts.set(`transcript.v1.0.2`, 'start-worker\nshutdown-worker\n'); + + addTS(`transcript.v1.2`, { startPos: 2, endPos: 5, hash: t2hash }); artifacts.set( - `transcript.v1.0.3`, + `transcript.v1.2.5`, 'start-worker\ndelivery1\nsave-snapshot\n', ); - exportData.set(`snapshot.v1.2`, JSON.stringify({ ...sbase, snapPos: 2 })); - artifacts.set(`snapshot.v1.2`, snapshotData); + exportData.set(`snapshot.v1.4`, JSON.stringify({ ...sbase, snapPos: 4 })); + artifacts.set(`snapshot.v1.4`, snapshotData); - addTS(`transcript.v1.3`, { startPos: 3, endPos: 6, hash: t3hash }); + addTS(`transcript.v1.5`, { startPos: 5, endPos: 8, hash: t5hash }); artifacts.set( - 'transcript.v1.3.6', + 'transcript.v1.5.8', 'load-snapshot\ndelivery2\nsave-snapshot\n', ); exportData.set( - `snapshot.v1.5`, - JSON.stringify({ ...sbase, snapPos: 5, inUse: 1 }), + `snapshot.v1.7`, + JSON.stringify({ ...sbase, snapPos: 7, inUse: 1 }), ); - artifacts.set(`snapshot.v1.5`, snapshotData); + artifacts.set(`snapshot.v1.7`, snapshotData); - artifacts.set('transcript.v1.6.8', 'load-snapshot\ndelivery3\n'); - exportData.set(`snapshot.v1.current`, 'snapshot.v1.5'); + artifacts.set('transcript.v1.8.10', 'load-snapshot\ndelivery3\n'); + exportData.set(`snapshot.v1.current`, 'snapshot.v1.7'); addTS(`transcript.v1.current`, { - startPos: 6, - endPos: 8, + startPos: 8, + endPos: 10, isCurrent: 1, - hash: t6hash, + hash: t8hash, }); - return { exportData, artifacts, t0hash, t3hash, t6hash }; + return { exportData, artifacts, t0hash, t2hash, t5hash, t8hash }; } const importTest = test.macro(async (t, mode) => { + /** @typedef {import('../src/internal.js').ArtifactMode} ArtifactMode */ + const artifactMode = /** @type {ArtifactMode} */ (mode); + const [dbDir, cleanup] = await tmpDir('testdb'); t.teardown(cleanup); - const { exportData, artifacts, t0hash, t3hash, t6hash } = buildData(); + 
const { exportData, artifacts, t0hash, t2hash, t5hash, t8hash } = buildData(); const exporter = makeExporter(exportData, artifacts); // now import - const includeHistorical = mode === 'historical'; - const options = { includeHistorical }; - const ss = await importSwingStore(exporter, dbDir, options); - ss.hostStorage.commit(); + const ss = await importSwingStore(exporter, dbDir, { artifactMode }); + await ss.hostStorage.commit(); const data = convert(ss.debug.dump()); const convertTranscript = (items, startPos = 0) => { @@ -187,31 +200,43 @@ const importTest = test.macro(async (t, mode) => { t.deepEqual(data.kvEntries, { key1: 'value1' }); let ts = []; - let tsStart = 6; // start of current span - if (mode === 'historical') { - tsStart = 0; // historical means we get all spans - ts = ts.concat(['start-worker', 'delivery1', 'save-snapshot']); // 0,1,2 - ts = ts.concat(['load-snapshot', 'delivery2', 'save-snapshot']); // 3,4,5 + if (rank[artifactMode] >= rank.archival) { + // only 'archival' and 'debug' get the old incarnation's span + ts = ts.concat(['start-worker', 'shutdown-worker']); // 0,1 + } + if (rank[artifactMode] >= rank.replay) { + // those, or 'replay', get the current incarnation's old spans + ts = ts.concat(['start-worker', 'delivery1', 'save-snapshot']); // 2,3,4 + ts = ts.concat(['load-snapshot', 'delivery2', 'save-snapshot']); // 5,6,7 + } + ts = ts.concat(['load-snapshot', 'delivery3']); // 8,9 + + let tsStart; + if (artifactMode === 'archival' || artifactMode === 'debug') { + tsStart = 0; + } else if (artifactMode === 'replay') { + tsStart = 2; + } else { + tsStart = 8; } - ts = ts.concat(['load-snapshot', 'delivery3']); // 6,7 const expectedTranscript = convertTranscript(ts, tsStart); t.deepEqual(data.transcripts, { v1: expectedTranscript }); const uncompressedSnapshot = Buffer.from(snapshotData); const expectedSnapshots = []; - if (mode === 'historical') { + if (artifactMode === 'debug') { expectedSnapshots.push({ uncompressedSnapshot, hash: 
snapHash, inUse: 0, - snapPos: 2, + snapPos: 4, }); } expectedSnapshots.push({ uncompressedSnapshot, hash: snapHash, inUse: 1, - snapPos: 5, + snapPos: 7, }); t.deepEqual(await convertSnapshots(data.snapshots), { v1: expectedSnapshots, @@ -225,12 +250,12 @@ const importTest = test.macro(async (t, mode) => { ]; t.deepEqual( spanRows.map(sr => sr.startPos), - [0, 3, 6], + [0, 2, 5, 8], ); // and a new export should include all metadata, regardless of import mode - const reExporter = makeSwingStoreExporter(dbDir, 'current'); + const reExporter = makeSwingStoreExporter(dbDir); const reExportData = new Map(); for await (const [key, value] of reExporter.getExportData()) { reExportData.set(key, value); @@ -248,18 +273,20 @@ const importTest = test.macro(async (t, mode) => { }; check('kv.key1', 'value1'); - check('snapshot.v1.2', { vatID: 'v1', snapPos: 2, inUse: 0, hash: snapHash }); - check('snapshot.v1.5', { vatID: 'v1', snapPos: 5, inUse: 1, hash: snapHash }); - check('snapshot.v1.current', 'snapshot.v1.5'); - const base = { vatID: 'v1', incarnation: 0, isCurrent: 0 }; - check('transcript.v1.0', { ...base, startPos: 0, endPos: 3, hash: t0hash }); - check('transcript.v1.3', { ...base, startPos: 3, endPos: 6, hash: t3hash }); + check('snapshot.v1.4', { vatID: 'v1', snapPos: 4, inUse: 0, hash: snapHash }); + check('snapshot.v1.7', { vatID: 'v1', snapPos: 7, inUse: 1, hash: snapHash }); + check('snapshot.v1.current', 'snapshot.v1.7'); + const base0 = { vatID: 'v1', incarnation: 0, isCurrent: 0 }; + const base1 = { vatID: 'v1', incarnation: 1, isCurrent: 0 }; + check('transcript.v1.0', { ...base0, startPos: 0, endPos: 2, hash: t0hash }); + check('transcript.v1.2', { ...base1, startPos: 2, endPos: 5, hash: t2hash }); + check('transcript.v1.5', { ...base1, startPos: 5, endPos: 8, hash: t5hash }); check('transcript.v1.current', { - ...base, - startPos: 6, - endPos: 8, + ...base1, + startPos: 8, + endPos: 10, isCurrent: 1, - hash: t6hash, + hash: t8hash, }); 
check(`bundle.${bundle0ID}`, bundle0ID); @@ -270,8 +297,10 @@ const importTest = test.macro(async (t, mode) => { } }); -test('import current', importTest, 'current'); -test('import historical', importTest, 'historical'); +test('import operational', importTest, 'operational'); +test('import replay', importTest, 'replay'); +test('import archival', importTest, 'archival'); +test('import debug', importTest, 'debug'); test('import is missing bundle', async t => { const [dbDir, cleanup] = await tmpDir('testdb'); diff --git a/packages/swing-store/test/test-repair-metadata.js b/packages/swing-store/test/test-repair-metadata.js index 1629c9fdcc8..38f2f9972a7 100644 --- a/packages/swing-store/test/test-repair-metadata.js +++ b/packages/swing-store/test/test-repair-metadata.js @@ -38,9 +38,9 @@ test('repair metadata', async t => { // assert that all the metadata is there at first const ts1 = getTS.all('v1'); - t.deepEqual(ts1, [0, 3, 6]); // three spans + t.deepEqual(ts1, [0, 2, 5, 8]); // four spans const ss1 = getSS.all('v1'); - t.deepEqual(ss1, [2, 5]); // two snapshots + t.deepEqual(ss1, [4, 7]); // two snapshots // now clobber them to simulate #8025 (note: these auto-commit) db.prepare('DELETE FROM transcriptSpans WHERE isCurrent IS NULL').run(); @@ -48,9 +48,9 @@ test('repair metadata', async t => { // confirm that we clobbered them const ts2 = getTS.all('v1'); - t.deepEqual(ts2, [6]); // only the latest + t.deepEqual(ts2, [8]); // only the latest const ss2 = getSS.all('v1'); - t.deepEqual(ss2, [5]); + t.deepEqual(ss2, [7]); // now fix it await ss.hostStorage.repairMetadata(exporter); @@ -58,17 +58,17 @@ test('repair metadata', async t => { // and check that the metadata is back const ts3 = getTS.all('v1'); - t.deepEqual(ts3, [0, 3, 6]); // all three again + t.deepEqual(ts3, [0, 2, 5, 8]); // all four again const ss3 = getSS.all('v1'); - t.deepEqual(ss3, [2, 5]); + t.deepEqual(ss3, [4, 7]); // repair should be idempotent await ss.hostStorage.repairMetadata(exporter); 
const ts4 = getTS.all('v1'); - t.deepEqual(ts4, [0, 3, 6]); // still there + t.deepEqual(ts4, [0, 2, 5, 8]); // still there const ss4 = getSS.all('v1'); - t.deepEqual(ss4, [2, 5]); + t.deepEqual(ss4, [4, 7]); }); test('repair metadata ignores kvStore entries', async t => { @@ -101,9 +101,9 @@ test('repair metadata rejects mismatched snapshot entries', async t => { await ss.hostStorage.commit(); // perform the repair with mismatched snapshot entry - const old = JSON.parse(exportData.get('snapshot.v1.2')); + const old = JSON.parse(exportData.get('snapshot.v1.4')); const wrong = { ...old, hash: 'wrong' }; - exportData.set('snapshot.v1.2', JSON.stringify(wrong)); + exportData.set('snapshot.v1.4', JSON.stringify(wrong)); await t.throwsAsync(async () => ss.hostStorage.repairMetadata(exporter), { message: /repairSnapshotRecord metadata mismatch/, From df729030643a097262ad1393503d380e243107eb Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Tue, 11 Jul 2023 22:30:12 +0000 Subject: [PATCH 095/109] feat(x/swingset): add store data to genesis --- .../proto/agoric/swingset/genesis.proto | 10 + golang/cosmos/types/kv_entry_helpers.go | 14 + golang/cosmos/x/swingset/genesis.go | 38 +- golang/cosmos/x/swingset/types/genesis.pb.go | 326 +++++++++++++++++- 4 files changed, 362 insertions(+), 26 deletions(-) diff --git a/golang/cosmos/proto/agoric/swingset/genesis.proto b/golang/cosmos/proto/agoric/swingset/genesis.proto index 46cefb2b69a..8a178e4e12e 100644 --- a/golang/cosmos/proto/agoric/swingset/genesis.proto +++ b/golang/cosmos/proto/agoric/swingset/genesis.proto @@ -13,4 +13,14 @@ message GenesisState { Params params = 2 [(gogoproto.nullable) = false]; State state = 3 [(gogoproto.nullable) = false]; + + repeated SwingStoreExportDataEntry swing_store_export_data = 4 [ + (gogoproto.jsontag) = "swingStoreExportData" + ]; +} + +// A SwingStore "export data" entry. 
+message SwingStoreExportDataEntry { + string key = 1; + string value = 2; } diff --git a/golang/cosmos/types/kv_entry_helpers.go b/golang/cosmos/types/kv_entry_helpers.go index d6bd20b8e7a..7ee16de189a 100644 --- a/golang/cosmos/types/kv_entry_helpers.go +++ b/golang/cosmos/types/kv_entry_helpers.go @@ -5,6 +5,7 @@ import ( "fmt" "io" + swingsettypes "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types" vstoragetypes "github.com/Agoric/agoric-sdk/golang/cosmos/x/vstorage/types" sdk "github.com/cosmos/cosmos-sdk/types" ) @@ -36,6 +37,8 @@ import ( // types are available: // - NewVstorageDataEntriesReader constructs a reader from a slice of // vstorage DataEntry values. +// - NewSwingStoreExportDataEntriesReader constructs a reader from a slice +// of SwingStoreExportDataEntry values. // - NewJsonRawMessageKVEntriesReader constructs a reader from a slice of // [key: string, value?: string | null] JSON array values. // - NewJsonlKVEntryDecoderReader constructs a reader from an io.ReadCloser @@ -154,6 +157,17 @@ func NewVstorageDataEntriesReader(vstorageDataEntries []*vstoragetypes.DataEntry } } +// NewSwingStoreExportDataEntriesReader creates a KVEntryReader backed by +// a SwingStoreExportDataEntry slice +func NewSwingStoreExportDataEntriesReader(exportDataEntries []*swingsettypes.SwingStoreExportDataEntry) KVEntryReader { + return &kvEntriesReader[*swingsettypes.SwingStoreExportDataEntry]{ + entries: exportDataEntries, + toKVEntry: func(sourceEntry *swingsettypes.SwingStoreExportDataEntry) (KVEntry, error) { + return NewKVEntry(sourceEntry.Key, sourceEntry.Value), nil + }, + } +} + // NewJsonRawMessageKVEntriesReader creates a KVEntryReader backed by // a json.RawMessage slice func NewJsonRawMessageKVEntriesReader(jsonEntries []json.RawMessage) KVEntryReader { diff --git a/golang/cosmos/x/swingset/genesis.go b/golang/cosmos/x/swingset/genesis.go index 59a2725a263..5b27c603b70 100644 --- a/golang/cosmos/x/swingset/genesis.go +++ 
b/golang/cosmos/x/swingset/genesis.go @@ -8,10 +8,6 @@ import ( sdk "github.com/cosmos/cosmos-sdk/types" ) -func NewGenesisState() *types.GenesisState { - return &types.GenesisState{} -} - func ValidateGenesis(data *types.GenesisState) error { if data == nil { return fmt.Errorf("swingset genesis data cannot be nil") @@ -24,7 +20,9 @@ func ValidateGenesis(data *types.GenesisState) error { func DefaultGenesisState() *types.GenesisState { return &types.GenesisState{ - Params: types.DefaultParams(), + Params: types.DefaultParams(), + State: types.State{}, + SwingStoreExportData: []*types.SwingStoreExportDataEntry{}, } } @@ -34,13 +32,37 @@ func InitGenesis(ctx sdk.Context, keeper Keeper, data *types.GenesisState) bool keeper.SetParams(ctx, data.GetParams()) keeper.SetState(ctx, data.GetState()) + swingStoreExportData := data.GetSwingStoreExportData() + if len(swingStoreExportData) > 0 { + // See https://github.com/Agoric/agoric-sdk/issues/6527 + panic("genesis with swing-store state not implemented") + } + // TODO: bootstrap only if not restoring swing-store from genesis state return true } func ExportGenesis(ctx sdk.Context, k Keeper) *types.GenesisState { - gs := NewGenesisState() - gs.Params = k.GetParams(ctx) - gs.State = k.GetState(ctx) + gs := &types.GenesisState{ + Params: k.GetParams(ctx), + State: k.GetState(ctx), + SwingStoreExportData: []*types.SwingStoreExportDataEntry{}, + } + + // Only export the swing-store shadow copy for now + // TODO: + // - perform state-sync export with check blockHeight (figure out how to + // handle export of historical height), + // - include swing-store artifacts in genesis state + // See https://github.com/Agoric/agoric-sdk/issues/6527 + exportDataIterator := k.GetSwingStore(ctx).Iterator(nil, nil) + defer exportDataIterator.Close() + for ; exportDataIterator.Valid(); exportDataIterator.Next() { + entry := types.SwingStoreExportDataEntry{ + Key: string(exportDataIterator.Key()), + Value: string(exportDataIterator.Value()), + } 
+ gs.SwingStoreExportData = append(gs.SwingStoreExportData, &entry) + } return gs } diff --git a/golang/cosmos/x/swingset/types/genesis.pb.go b/golang/cosmos/x/swingset/types/genesis.pb.go index acaa75a5272..47d94b1e51f 100644 --- a/golang/cosmos/x/swingset/types/genesis.pb.go +++ b/golang/cosmos/x/swingset/types/genesis.pb.go @@ -25,8 +25,9 @@ const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package // The initial or exported state. type GenesisState struct { - Params Params `protobuf:"bytes,2,opt,name=params,proto3" json:"params"` - State State `protobuf:"bytes,3,opt,name=state,proto3" json:"state"` + Params Params `protobuf:"bytes,2,opt,name=params,proto3" json:"params"` + State State `protobuf:"bytes,3,opt,name=state,proto3" json:"state"` + SwingStoreExportData []*SwingStoreExportDataEntry `protobuf:"bytes,4,rep,name=swing_store_export_data,json=swingStoreExportData,proto3" json:"swingStoreExportData"` } func (m *GenesisState) Reset() { *m = GenesisState{} } @@ -76,29 +77,96 @@ func (m *GenesisState) GetState() State { return State{} } +func (m *GenesisState) GetSwingStoreExportData() []*SwingStoreExportDataEntry { + if m != nil { + return m.SwingStoreExportData + } + return nil +} + +// A SwingStore "export data" entry. 
+type SwingStoreExportDataEntry struct { + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (m *SwingStoreExportDataEntry) Reset() { *m = SwingStoreExportDataEntry{} } +func (m *SwingStoreExportDataEntry) String() string { return proto.CompactTextString(m) } +func (*SwingStoreExportDataEntry) ProtoMessage() {} +func (*SwingStoreExportDataEntry) Descriptor() ([]byte, []int) { + return fileDescriptor_49b057311de9d296, []int{1} +} +func (m *SwingStoreExportDataEntry) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *SwingStoreExportDataEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_SwingStoreExportDataEntry.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *SwingStoreExportDataEntry) XXX_Merge(src proto.Message) { + xxx_messageInfo_SwingStoreExportDataEntry.Merge(m, src) +} +func (m *SwingStoreExportDataEntry) XXX_Size() int { + return m.Size() +} +func (m *SwingStoreExportDataEntry) XXX_DiscardUnknown() { + xxx_messageInfo_SwingStoreExportDataEntry.DiscardUnknown(m) +} + +var xxx_messageInfo_SwingStoreExportDataEntry proto.InternalMessageInfo + +func (m *SwingStoreExportDataEntry) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *SwingStoreExportDataEntry) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + func init() { proto.RegisterType((*GenesisState)(nil), "agoric.swingset.GenesisState") + proto.RegisterType((*SwingStoreExportDataEntry)(nil), "agoric.swingset.SwingStoreExportDataEntry") } func init() { proto.RegisterFile("agoric/swingset/genesis.proto", fileDescriptor_49b057311de9d296) } var fileDescriptor_49b057311de9d296 = []byte{ - // 234 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 
0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4d, 0x4c, 0xcf, 0x2f, - 0xca, 0x4c, 0xd6, 0x2f, 0x2e, 0xcf, 0xcc, 0x4b, 0x2f, 0x4e, 0x2d, 0xd1, 0x4f, 0x4f, 0xcd, 0x4b, - 0x2d, 0xce, 0x2c, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x87, 0x48, 0xeb, 0xc1, 0xa4, - 0xa5, 0x44, 0xd2, 0xf3, 0xd3, 0xf3, 0xc1, 0x72, 0xfa, 0x20, 0x16, 0x44, 0x99, 0x94, 0x1c, 0xba, - 0x29, 0x30, 0x06, 0x44, 0x5e, 0xa9, 0x9e, 0x8b, 0xc7, 0x1d, 0x62, 0x6e, 0x70, 0x49, 0x62, 0x49, - 0xaa, 0x90, 0x29, 0x17, 0x5b, 0x41, 0x62, 0x51, 0x62, 0x6e, 0xb1, 0x04, 0x93, 0x02, 0xa3, 0x06, - 0xb7, 0x91, 0xb8, 0x1e, 0x9a, 0x3d, 0x7a, 0x01, 0x60, 0x69, 0x27, 0x96, 0x13, 0xf7, 0xe4, 0x19, - 0x82, 0xa0, 0x8a, 0x85, 0x8c, 0xb8, 0x58, 0x8b, 0x41, 0xfa, 0x25, 0x98, 0xc1, 0xba, 0xc4, 0x30, - 0x74, 0x81, 0x4d, 0x87, 0x6a, 0x82, 0x28, 0xb5, 0x62, 0x79, 0xb1, 0x40, 0x9e, 0xc1, 0x29, 0xf4, - 0xc4, 0x23, 0x39, 0xc6, 0x0b, 0x8f, 0xe4, 0x18, 0x1f, 0x3c, 0x92, 0x63, 0x9c, 0xf0, 0x58, 0x8e, - 0xe1, 0xc2, 0x63, 0x39, 0x86, 0x1b, 0x8f, 0xe5, 0x18, 0xa2, 0xac, 0xd3, 0x33, 0x4b, 0x32, 0x4a, - 0x93, 0xf4, 0x92, 0xf3, 0x73, 0xf5, 0x1d, 0x21, 0xbe, 0x80, 0x98, 0xaa, 0x5b, 0x9c, 0x92, 0xad, - 0x9f, 0x9e, 0x9f, 0x93, 0x98, 0x97, 0xae, 0x9f, 0x9c, 0x5f, 0x9c, 0x9b, 0x5f, 0xac, 0x5f, 0x81, - 0xf0, 0x60, 0x49, 0x65, 0x41, 0x6a, 0x71, 0x12, 0x1b, 0xd8, 0x7b, 0xc6, 0x80, 0x00, 0x00, 0x00, - 0xff, 0xff, 0x65, 0xe6, 0xb9, 0x87, 0x46, 0x01, 0x00, 0x00, + // 334 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x91, 0xcf, 0x4b, 0x02, 0x41, + 0x1c, 0xc5, 0x77, 0xf2, 0x07, 0x38, 0x06, 0xc5, 0x22, 0xb9, 0x09, 0x8d, 0xe2, 0x49, 0x82, 0x76, + 0xc0, 0xe8, 0x52, 0xa7, 0x2c, 0xe9, 0x1a, 0x2b, 0x5d, 0xba, 0xc8, 0xa8, 0xc3, 0xb4, 0xa8, 0x3b, + 0xcb, 0x7c, 0xc7, 0x52, 0xfa, 0x27, 0xfa, 0x13, 0xfa, 0x73, 0x3c, 0x7a, 0xec, 0x24, 0xa1, 0x97, + 0xe8, 0x6f, 0xe8, 0x10, 0x3b, 0xa3, 0x04, 0x6a, 0xb7, 0xb7, 0xfb, 0x79, 0xef, 0x0d, 0x33, 0x0f, + 0x9f, 0x30, 0x21, 0x55, 0xd8, 0xa5, 
0xf0, 0x12, 0x46, 0x02, 0xb8, 0xa6, 0x82, 0x47, 0x1c, 0x42, + 0xf0, 0x63, 0x25, 0xb5, 0x74, 0x0f, 0x2c, 0xf6, 0xd7, 0xb8, 0x54, 0x10, 0x52, 0x48, 0xc3, 0x68, + 0xa2, 0xac, 0xad, 0x44, 0x36, 0x5b, 0xd6, 0xc2, 0xf2, 0xea, 0x0f, 0xc2, 0xfb, 0x77, 0xb6, 0xb8, + 0xa5, 0x99, 0xe6, 0xee, 0x05, 0xce, 0xc6, 0x4c, 0xb1, 0x21, 0x78, 0x7b, 0x15, 0x54, 0xcb, 0xd7, + 0x8b, 0xfe, 0xc6, 0x41, 0xfe, 0xbd, 0xc1, 0x8d, 0xf4, 0x74, 0x5e, 0x76, 0x82, 0x95, 0xd9, 0xad, + 0xe3, 0x0c, 0x24, 0x79, 0x2f, 0x65, 0x52, 0x47, 0x5b, 0x29, 0xd3, 0xbe, 0x0a, 0x59, 0xab, 0xfb, + 0x8a, 0x8b, 0x06, 0xb7, 0x41, 0x4b, 0xc5, 0xdb, 0x7c, 0x1c, 0x4b, 0xa5, 0xdb, 0x3d, 0xa6, 0x99, + 0x97, 0xae, 0xa4, 0x6a, 0xf9, 0xfa, 0xe9, 0x76, 0x4b, 0x22, 0x5a, 0x89, 0xbd, 0x69, 0xdc, 0xb7, + 0x4c, 0xb3, 0x66, 0xa4, 0xd5, 0xa4, 0xe1, 0x7d, 0xcf, 0xcb, 0x05, 0xd8, 0x81, 0x83, 0x9d, 0x7f, + 0x2f, 0xd3, 0x5f, 0xef, 0x65, 0xa7, 0x7a, 0x83, 0x8f, 0xff, 0xad, 0x74, 0x0f, 0x71, 0xaa, 0xcf, + 0x27, 0x1e, 0xaa, 0xa0, 0x5a, 0x2e, 0x48, 0xa4, 0x5b, 0xc0, 0x99, 0x67, 0x36, 0x18, 0x71, 0xf3, + 0x36, 0xb9, 0xc0, 0x7e, 0x34, 0x1e, 0xa6, 0x0b, 0x82, 0x66, 0x0b, 0x82, 0x3e, 0x17, 0x04, 0xbd, + 0x2d, 0x89, 0x33, 0x5b, 0x12, 0xe7, 0x63, 0x49, 0x9c, 0xc7, 0x2b, 0x11, 0xea, 0xa7, 0x51, 0xc7, + 0xef, 0xca, 0x21, 0xbd, 0xb6, 0x43, 0xd8, 0x1b, 0x9d, 0x41, 0xaf, 0x4f, 0x85, 0x1c, 0xb0, 0x48, + 0xd0, 0xae, 0x84, 0xa1, 0x04, 0x3a, 0xfe, 0xdb, 0x48, 0x4f, 0x62, 0x0e, 0x9d, 0xac, 0x59, 0xe8, + 0xfc, 0x37, 0x00, 0x00, 0xff, 0xff, 0x94, 0xe9, 0x22, 0x36, 0x09, 0x02, 0x00, 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { @@ -121,6 +189,20 @@ func (m *GenesisState) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if len(m.SwingStoreExportData) > 0 { + for iNdEx := len(m.SwingStoreExportData) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.SwingStoreExportData[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenesis(dAtA, i, uint64(size)) + } + i-- + 
dAtA[i] = 0x22 + } + } { size, err := m.State.MarshalToSizedBuffer(dAtA[:i]) if err != nil { @@ -144,6 +226,43 @@ func (m *GenesisState) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *SwingStoreExportDataEntry) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *SwingStoreExportDataEntry) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *SwingStoreExportDataEntry) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Value) > 0 { + i -= len(m.Value) + copy(dAtA[i:], m.Value) + i = encodeVarintGenesis(dAtA, i, uint64(len(m.Value))) + i-- + dAtA[i] = 0x12 + } + if len(m.Key) > 0 { + i -= len(m.Key) + copy(dAtA[i:], m.Key) + i = encodeVarintGenesis(dAtA, i, uint64(len(m.Key))) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + func encodeVarintGenesis(dAtA []byte, offset int, v uint64) int { offset -= sovGenesis(v) base := offset @@ -165,6 +284,29 @@ func (m *GenesisState) Size() (n int) { n += 1 + l + sovGenesis(uint64(l)) l = m.State.Size() n += 1 + l + sovGenesis(uint64(l)) + if len(m.SwingStoreExportData) > 0 { + for _, e := range m.SwingStoreExportData { + l = e.Size() + n += 1 + l + sovGenesis(uint64(l)) + } + } + return n +} + +func (m *SwingStoreExportDataEntry) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.Key) + if l > 0 { + n += 1 + l + sovGenesis(uint64(l)) + } + l = len(m.Value) + if l > 0 { + n += 1 + l + sovGenesis(uint64(l)) + } return n } @@ -269,6 +411,154 @@ func (m *GenesisState) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field SwingStoreExportData", wireType) + } + var msglen int + for shift := 
uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.SwingStoreExportData = append(m.SwingStoreExportData, &SwingStoreExportDataEntry{}) + if err := m.SwingStoreExportData[len(m.SwingStoreExportData)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenesis(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenesis + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *SwingStoreExportDataEntry) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: SwingStoreExportDataEntry: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: SwingStoreExportDataEntry: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Key", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := 
dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Key = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Value = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipGenesis(dAtA[iNdEx:]) From ebbb9829f1f845c0932ae92b23d0d43be9a0e196 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Mon, 24 Jul 2023 21:41:48 +0000 Subject: [PATCH 096/109] feat(cosmic-swingset): add repair-metadata snapshot restore option --- .../swingset/keeper/extension_snapshotter.go | 2 +- .../keeper/swing_store_exports_handler.go | 16 ++++++++-- .../cosmic-swingset/src/export-kernel-db.js | 10 +++++- .../cosmic-swingset/src/import-kernel-db.js | 31 +++++++++++++++++-- 4 files changed, 51 insertions(+), 8 deletions(-) diff --git a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go index e067d5112e9..0e73dc59970 100644 --- a/golang/cosmos/x/swingset/keeper/extension_snapshotter.go +++ b/golang/cosmos/x/swingset/keeper/extension_snapshotter.go @@ -304,6 +304,6 @@ func (snapshotter 
*ExtensionSnapshotter) RestoreExtension(blockHeight uint64, fo return snapshotter.swingStoreExportsHandler.RestoreExport( SwingStoreExportProvider{BlockHeight: blockHeight, GetExportDataReader: getExportDataReader, ReadNextArtifact: readNextArtifact}, - SwingStoreRestoreOptions{ArtifactMode: SwingStoreArtifactModeReplay}, + SwingStoreRestoreOptions{ArtifactMode: SwingStoreArtifactModeReplay, ExportDataMode: SwingStoreExportDataModeAll}, ) } diff --git a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go index 84ee6dc1acb..564db5de630 100644 --- a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go +++ b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go @@ -185,6 +185,11 @@ const ( // an export. ArtifactMode cannot be "none" in this case. SwingStoreExportDataModeSkip = "skip" + // SwingStoreExportDataModeRepairMetadata indicates the "export data" should be + // used to repair the metadata of an existing swing-store for an import + // operation. ArtifactMode must be "none" in this case. + SwingStoreExportDataModeRepairMetadata = "repair-metadata" + // SwingStoreExportDataModeAll indicates "export data" should be part of the // export or import. For import, ArtifactMode cannot be "none". SwingStoreExportDataModeAll = "all" @@ -211,6 +216,12 @@ type SwingStoreRestoreOptions struct { // (None, Operational, Replay, Archival, Debug). // See packages/cosmic-swingset/src/import-kernel-db.js performStateSyncImport ArtifactMode string `json:"artifactMode,omitempty"` + // ExportDataMode selects the purpose of the restore, to recreate a + // swing-store (SwingStoreExportDataModeAll), or just to import missing + // metadata (SwingStoreExportDataModeRepairMetadata). + // If RepairMetadata, ArtifactMode should be SwingStoreArtifactModeNone. + // If All, ArtifactMode must be at least SwingStoreArtifactModeOperational. 
+ ExportDataMode string `json:"exportDataMode,omitempty"` } type swingStoreImportOptions struct { @@ -219,8 +230,7 @@ type swingStoreImportOptions struct { ExportDir string `json:"exportDir"` // ArtifactMode is a copy of SwingStoreRestoreOptions.ArtifactMode ArtifactMode string `json:"artifactMode,omitempty"` - // ExportDataMode must currently be "all" for import, since "export data" is - // needed to restore a swing-store. + // ExportDataMode is a copy of SwingStoreRestoreOptions.ExportDataMode ExportDataMode string `json:"exportDataMode,omitempty"` } @@ -810,7 +820,7 @@ func (exportsHandler SwingStoreExportsHandler) RestoreExport(provider SwingStore Args: [1]swingStoreImportOptions{{ ExportDir: exportDir, ArtifactMode: restoreOptions.ArtifactMode, - ExportDataMode: SwingStoreExportDataModeAll, + ExportDataMode: restoreOptions.ExportDataMode, }}, } diff --git a/packages/cosmic-swingset/src/export-kernel-db.js b/packages/cosmic-swingset/src/export-kernel-db.js index c8be41916f1..2878658fe12 100755 --- a/packages/cosmic-swingset/src/export-kernel-db.js +++ b/packages/cosmic-swingset/src/export-kernel-db.js @@ -31,6 +31,7 @@ export const ExportManifestFileName = 'export-manifest.json'; /** * @typedef {'skip' // Do not include any "export data" (artifacts only) + * | 'repair-metadata' // Add missing artifact metadata (import only) * | 'all' // Include all export data, create new swing-store on import * } SwingStoreExportDataMode */ @@ -60,15 +61,22 @@ export const checkArtifactMode = getEffectiveArtifactMode; /** * @param {string | undefined} mode + * @param {boolean} [isImport] * @returns {asserts mode is SwingStoreExportDataMode | undefined} */ -export const checkExportDataMode = mode => { +export const checkExportDataMode = (mode, isImport = false) => { switch (mode) { case 'skip': case undefined: break; case 'all': break; + case 'repair-metadata': { + if (isImport) { + break; + } + // Fall through + } default: throw Fail`Invalid value ${q(mode)} for 
"export-data-mode"`; } diff --git a/packages/cosmic-swingset/src/import-kernel-db.js b/packages/cosmic-swingset/src/import-kernel-db.js index 8ab3c402db0..d41e6309cd1 100755 --- a/packages/cosmic-swingset/src/import-kernel-db.js +++ b/packages/cosmic-swingset/src/import-kernel-db.js @@ -13,7 +13,7 @@ import pathPower from 'path'; import BufferLineTransform from '@agoric/internal/src/node/buffer-line-transform.js'; import { Fail, q } from '@agoric/assert'; -import { importSwingStore } from '@agoric/swing-store'; +import { importSwingStore, openSwingStore } from '@agoric/swing-store'; import { isEntrypoint } from './helpers/is-entrypoint.js'; import { makeProcessValue } from './helpers/process-value.js'; @@ -45,7 +45,7 @@ export const validateImporterOptions = options => { options.blockHeight == null || typeof options.blockHeight === 'number' || Fail`optional blockHeight option not a number`; - checkExportDataMode(options.exportDataMode); + checkExportDataMode(options.exportDataMode, true); checkArtifactMode(options.artifactMode); options.includeHistorical === undefined || Fail`deprecated includeHistorical option found`; @@ -104,6 +104,7 @@ const checkAndGetImportSwingStoreOptions = (options, manifest) => { * @param {Pick & Pick} powers.fs * @param {import('path')['resolve']} powers.pathResolve * @param {typeof import('@agoric/swing-store')['importSwingStore']} [powers.importSwingStore] + * @param {typeof import('@agoric/swing-store')['openSwingStore']} [powers.openSwingStore] * @param {null | ((...args: any[]) => void)} [powers.log] * @returns {Promise} */ @@ -113,6 +114,7 @@ export const performStateSyncImport = async ( fs: { createReadStream, readFile }, pathResolve, importSwingStore: importDB = importSwingStore, + openSwingStore: openDB = openSwingStore, log = console.log, }, ) => { @@ -193,6 +195,29 @@ export const performStateSyncImport = async ( const { hostStorage } = swingstore; hostStorage.kvStore.set('host.height', String(manifest.blockHeight)); + await 
hostStorage.commit(); + await hostStorage.close(); + } else if (exportDataMode === 'repair-metadata') { + blockHeight !== 0 || Fail`repair metadata requires a block height`; + + manifest.data || Fail`State-sync manifest missing export data`; + + artifactMode === 'none' || + Fail`Cannot restore artifacts while repairing metadata`; + + const { hostStorage } = openDB(stateDir); + + const savedBlockHeight = + Number(hostStorage.kvStore.get('host.height')) || 0; + + if (blockHeight !== savedBlockHeight) { + throw Fail`block height doesn't match. requested=${q( + blockHeight, + )}, current=${q(savedBlockHeight)}`; + } + + await hostStorage.repairMetadata(exporter); + await hostStorage.commit(); await hostStorage.close(); } else if (exportDataMode === 'skip') { @@ -241,7 +266,7 @@ export const main = async ( checkArtifactMode(artifactMode); const exportDataMode = processValue.getFlag('export-data-mode'); - checkExportDataMode(exportDataMode); + checkExportDataMode(exportDataMode, true); if ( processValue.getBoolean({ flagName: 'include-historical' }) !== undefined From dd8cdf047df05d3c6bca5d1e55e119a33bc0cbf7 Mon Sep 17 00:00:00 2001 From: Brian Warner Date: Mon, 14 Aug 2023 18:11:34 -0700 Subject: [PATCH 097/109] fix(cosmic-swingset): backwards param compat in import/export --- .../cosmic-swingset/src/export-kernel-db.js | 35 ++++++++++--------- .../cosmic-swingset/src/import-kernel-db.js | 6 +++- 2 files changed, 24 insertions(+), 17 deletions(-) diff --git a/packages/cosmic-swingset/src/export-kernel-db.js b/packages/cosmic-swingset/src/export-kernel-db.js index 7705c488c03..3da83614a08 100755 --- a/packages/cosmic-swingset/src/export-kernel-db.js +++ b/packages/cosmic-swingset/src/export-kernel-db.js @@ -11,7 +11,7 @@ import pathPower from 'path'; import { fileURLToPath } from 'url'; import { makePromiseKit } from '@endo/promise-kit'; -import { Fail } from '@agoric/assert'; +import { Fail, q } from '@agoric/assert'; import { makeAggregateError } from 
'@agoric/internal'; import { makeShutdown } from '@agoric/internal/src/node/shutdown.js'; import { openSwingStore, makeSwingStoreExporter } from '@agoric/swing-store'; @@ -19,29 +19,35 @@ import { openSwingStore, makeSwingStoreExporter } from '@agoric/swing-store'; import { isEntrypoint } from './helpers/is-entrypoint.js'; import { makeProcessValue } from './helpers/process-value.js'; -/** @typedef {'current' | 'archival' | 'debug'} SwingStoreExportMode */ - // ExportManifestFilename is the manifest filename which must be synchronized // with the golang SwingStoreExportsHandler in golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go export const ExportManifestFileName = 'export-manifest.json'; -// eslint-disable-next-line jsdoc/require-returns-check +/** @typedef {'current' | 'archival' | 'debug'} SwingStoreExportMode */ + /** - * @param {string | undefined} mode - * @returns {asserts mode is SwingStoreExportMode | undefined} + * @param {SwingStoreExportMode | undefined} exportMode + * @returns {import("@agoric/swing-store").ArtifactMode} */ -const checkExportMode = mode => { - switch (mode) { +const getArtifactModeFromExportMode = exportMode => { + switch (exportMode) { case 'current': + case undefined: + return 'operational'; case 'archival': + return 'archival'; case 'debug': - case undefined: - break; + return 'debug'; default: - throw Fail`Invalid value ${mode} for "export-mode"`; + throw Fail`Invalid value ${q(exportMode)} for "export-mode"`; } }; +/** + * @type {(exportMode: string | undefined) => asserts exportMode is SwingStoreExportMode} + */ +const checkExportMode = getArtifactModeFromExportMode; + /** * A state-sync manifest is a representation of the information contained in a * swingStore export for a given block. @@ -116,10 +122,7 @@ export const initiateSwingStoreExport = ( log = console.log, }, ) => { - const effectiveExportMode = exportMode ?? 
'current'; - if (effectiveExportMode !== 'current' && !includeExportData) { - throw Fail`Must include export data if export mode not "current"`; - } + const artifactMode = getArtifactModeFromExportMode(exportMode); /** @type {number | undefined} */ let savedBlockHeight; @@ -140,7 +143,7 @@ export const initiateSwingStoreExport = ( const manifestFile = await open(manifestPath, 'wx'); cleanup.push(async () => manifestFile.close()); - const swingStoreExporter = makeExporter(stateDir, exportMode); + const swingStoreExporter = makeExporter(stateDir, { artifactMode }); cleanup.push(async () => swingStoreExporter.close()); const { hostStorage } = openDB(stateDir); diff --git a/packages/cosmic-swingset/src/import-kernel-db.js b/packages/cosmic-swingset/src/import-kernel-db.js index ff1d3a5f9a8..bed9d42cfb8 100755 --- a/packages/cosmic-swingset/src/import-kernel-db.js +++ b/packages/cosmic-swingset/src/import-kernel-db.js @@ -141,7 +141,11 @@ export const performStateSyncImport = async ( }, }); - const swingstore = await importDB(exporter, stateDir, { includeHistorical }); + const artifactMode = includeHistorical + ? 
'debug' // for now don't enforce completeness but allow importing all provided artifacts + : 'operational'; + + const swingstore = await importDB(exporter, stateDir, { artifactMode }); const { hostStorage } = swingstore; From 6ba19571688518dcfdc4553a0c822695a61908b1 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Mon, 24 Jul 2023 23:53:02 +0000 Subject: [PATCH 098/109] feat(cosmos): fix and migrate swing-store --- golang/cosmos/app/app.go | 95 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 94 insertions(+), 1 deletion(-) diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index 9668dfd37eb..82db55ed54f 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -810,6 +810,42 @@ func NewAgoricApp( return app } +type swingStoreMigrationEventHandler struct { + swingStore sdk.KVStore +} + +func (eventHandler swingStoreMigrationEventHandler) OnExportStarted(height uint64, retrieveSwingStoreExport func() error) error { + return retrieveSwingStoreExport() +} + +func (eventHandler swingStoreMigrationEventHandler) OnExportRetrieved(provider swingsetkeeper.SwingStoreExportProvider) (err error) { + exportDataReader, err := provider.GetExportDataReader() + if err != nil { + return err + } + defer exportDataReader.Close() + + var hasExportData bool + + for { + entry, err := exportDataReader.Read() + if err == io.EOF { + break + } else if err != nil { + return err + } + hasExportData = true + if !entry.HasValue() { + return fmt.Errorf("no value for export data key %s", entry.Key()) + } + eventHandler.swingStore.Set([]byte(entry.Key()), []byte(entry.StringValue())) + } + if !hasExportData { + return fmt.Errorf("export data had no entries") + } + return nil +} + // upgrade11Handler performs standard upgrade actions plus custom actions for upgrade-11. 
func upgrade11Handler(app *GaiaApp, targetUpgrade string) func(sdk.Context, upgradetypes.Plan, module.VersionMap) (module.VersionMap, error) { return func(ctx sdk.Context, plan upgradetypes.Plan, fromVm module.VersionMap) (module.VersionMap, error) { @@ -817,7 +853,64 @@ func upgrade11Handler(app *GaiaApp, targetUpgrade string) func(sdk.Context, upgr // Record the plan to send to SwingSet app.upgradePlan = &plan - // TODO: Migrate x/vstorage swingStore to x/swingset SwingStore + // Perform swing-store migrations. We do this in the app upgrade handler + // since it involves multiple modules (x/vstorage and x/swingset) which + // don't strictly have a version change on their own. + + // We are at the beginning of the upgrade block, so all stores are committed + // as of the end of the previous block + savedBlockHeight := uint64(ctx.BlockHeight() - 1) + + // First, repair swing-store metadata in case this node was previously + // initialized from a state-sync snapshot. This is done with a check on the + // block height to catch early any hangover related mismatch. + // Only entries related to missing historical metadata are imported, but we + // don't know what these look like here, so we provide it all.
+ getSwingStoreExportDataFromVstorage := func() (reader agorictypes.KVEntryReader, err error) { + return agorictypes.NewVstorageDataEntriesReader( + app.VstorageKeeper.ExportStorageFromPrefix(ctx, swingsetkeeper.StoragePathSwingStore), + ), nil + } + + // We're not restoring any artifact to swing-store, nor have any to provide + readNoArtifact := func() (artifact swingsettypes.SwingStoreArtifact, err error) { + return artifact, io.EOF + } + + err := app.SwingStoreExportsHandler.RestoreExport( + swingsetkeeper.SwingStoreExportProvider{ + BlockHeight: savedBlockHeight, + GetExportDataReader: getSwingStoreExportDataFromVstorage, + ReadNextArtifact: readNoArtifact, + }, + swingsetkeeper.SwingStoreRestoreOptions{ + ArtifactMode: swingsetkeeper.SwingStoreArtifactModeNone, + ExportDataMode: swingsetkeeper.SwingStoreExportDataModeRepairMetadata, + }, + ) + if err != nil { + return nil, err + } + + // Then migrate the swing-store shadow copy: + // 1. Remove the swing-store "export data" shadow-copy entries from vstorage. + // 2. Export swing-store "export-data" (as of the previous block) through a + // handler that writes every entry into the swingset module's new Store. 
+ app.VstorageKeeper.RemoveEntriesWithPrefix(ctx, swingsetkeeper.StoragePathSwingStore) + err = app.SwingStoreExportsHandler.InitiateExport( + savedBlockHeight, + swingStoreMigrationEventHandler{swingStore: app.SwingSetKeeper.GetSwingStore(ctx)}, + swingsetkeeper.SwingStoreExportOptions{ + ArtifactMode: swingsetkeeper.SwingStoreArtifactModeNone, + ExportDataMode: swingsetkeeper.SwingStoreExportDataModeAll, + }, + ) + if err == nil { + err = swingsetkeeper.WaitUntilSwingStoreExportDone() + } + if err != nil { + return nil, err + } // Always run module migrations mvm, err := app.mm.RunMigrations(ctx, app.configurator, fromVm) From 1d6f3f1325482ec41b7dd574c3060619b94415fa Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 27 Jul 2023 18:38:27 +0000 Subject: [PATCH 099/109] feat(deployment): add state-sync to upgrade 11 test --- .../agoric-upgrade-10/actions.sh | 12 ++++ .../agoric-upgrade-10/env_setup.sh | 3 +- .../agoric-upgrade-11/actions.sh | 20 +++++- .../agoric-upgrade-11/env_setup.sh | 63 ++++++++++++++++++- .../agoric-upgrade-11/test.sh | 17 +++++ 5 files changed, 112 insertions(+), 3 deletions(-) create mode 100755 packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/test.sh diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/actions.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/actions.sh index 0aa7e992a95..cc53553a99b 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/actions.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/actions.sh @@ -176,12 +176,24 @@ agops perf satisfaction --from "$USER2ADDR" --executeOffer "$OFFER" --keyring-ba # replicate state-sync of node # this will cause the swing-store to prune some data +# we will save the pruned artifact for later killAgd EXPORT_DIR=$(mktemp -t -d swing-store-export-upgrade-10-XXX) make_swing_store_snapshot $EXPORT_DIR || fail "Couldn't make swing-store 
snapshot" test_val "$(compare_swing_store_export_data $EXPORT_DIR)" "match" "swing-store export data" +EXPORT_DIR_ALL_ARTIFACTS=$(mktemp -t -d swing-store-export-upgrade-10-all-artifacts-XXX) +make_swing_store_snapshot $EXPORT_DIR_ALL_ARTIFACTS --export-mode archival || fail "Couldn't make swing-store snapshot for historical artifacts" restore_swing_store_snapshot $EXPORT_DIR || fail "Couldn't restore swing-store snapshot" +( + cd $EXPORT_DIR_ALL_ARTIFACTS + mkdir $HOME/.agoric/data/agoric/swing-store-historical-artifacts + for i in *; do + [ -f $EXPORT_DIR/$i ] && continue + mv $i $HOME/.agoric/data/agoric/swing-store-historical-artifacts/ + done +) rm -rf $EXPORT_DIR +rm -rf $EXPORT_DIR_ALL_ARTIFACTS startAgd # # TODO fully test bidding diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh index ad484864d3a..22d6425887f 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-10/env_setup.sh @@ -109,7 +109,8 @@ submitDeliverInbound() { make_swing_store_snapshot() {( set -euo pipefail EXPORT_DIR="$1" - /usr/src/agoric-sdk/packages/cosmic-swingset/src/export-kernel-db.js --home "$HOME/.agoric" --export-dir "$EXPORT_DIR" --verbose --include-export-data + shift + /usr/src/agoric-sdk/packages/cosmic-swingset/src/export-kernel-db.js --home "$HOME/.agoric" --export-dir "$EXPORT_DIR" --verbose --include-export-data "$@" EXPORT_MANIFEST_FILE="$EXPORT_DIR/export-manifest.json" EXPORT_DATA_FILE="$EXPORT_DIR/$(cat "$EXPORT_MANIFEST_FILE" | jq -r .data)" diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh index 53a9292d65a..f29d64f6e65 100644 --- 
a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh @@ -2,4 +2,22 @@ . ./upgrade-test-scripts/env_setup.sh -# Core-eval contract upgrade +# Enable debugging +set -x + +# CWD is agoric-sdk +upgrade11=./upgrade-test-scripts/agoric-upgrade-11 + +# hacky restore of pruned artifacts +killAgd +EXPORT_DIR=$(mktemp -t -d swing-store-export-upgrade-11-XXX) +make_swing_store_snapshot $EXPORT_DIR --artifact-mode debug || fail "Couldn't make swing-store snapshot" +test_val "$(compare_swing_store_export_data $EXPORT_DIR)" "match" "swing-store export data" +HISTORICAL_ARTIFACTS="$(cd $HOME/.agoric/data/agoric/swing-store-historical-artifacts/; for i in *; do echo -n "[\"$i\",\"$i\"],"; done)" +mv -n $HOME/.agoric/data/agoric/swing-store-historical-artifacts/* $EXPORT_DIR || fail "some historical artifacts not pruned" +mv $EXPORT_DIR/export-manifest.json $EXPORT_DIR/export-manifest-original.json +cat $EXPORT_DIR/export-manifest-original.json | jq -r ".artifacts = .artifacts + [${HISTORICAL_ARTIFACTS%%,}] | del(.artifactMode)" > $EXPORT_DIR/export-manifest.json +restore_swing_store_snapshot $EXPORT_DIR || fail "Couldn't restore swing-store snapshot" +rmdir $HOME/.agoric/data/agoric/swing-store-historical-artifacts +rm -rf $EXPORT_DIR +startAgd diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh index 11e8e98ba3f..0faaf3a66b5 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh @@ -83,4 +83,65 @@ pushPriceOnce () { else echo "ERROR: pushPrice failed (using $nextOracle)" fi -} \ No newline at end of file +} + +export_genesis() { + HEIGHT_ARG= + + if [ -n "$1" ]; then + HEIGHT_ARG="--height $1" + 
shift + fi + + agd export $HEIGHT_ARG "$@" +} + +make_swing_store_snapshot() {( set -euo pipefail + EXPORT_DIR="$1" + shift + /usr/src/agoric-sdk/packages/cosmic-swingset/src/export-kernel-db.js --home "$HOME/.agoric" --export-dir "$EXPORT_DIR" --verbose --artifact-mode replay --export-data-mode all "$@" + + EXPORT_MANIFEST_FILE="$EXPORT_DIR/export-manifest.json" + EXPORT_DATA_FILE="$EXPORT_DIR/$(cat "$EXPORT_MANIFEST_FILE" | jq -r .data)" + EXPORT_DATA_UNTRUSTED_FILE="${EXPORT_DATA_FILE%.*}-untrusted.jsonl" + EXPORT_HEIGHT=$(cat "$EXPORT_MANIFEST_FILE" | jq -r .blockHeight) + EXPORT_MANIFEST="$(cat $EXPORT_MANIFEST_FILE)" + + mv "$EXPORT_DATA_FILE" "$EXPORT_DATA_UNTRUSTED_FILE" + export_genesis $EXPORT_HEIGHT | jq -cr '.app_state.swingset.swing_store_export_data[] | [.key,.value]' > "$EXPORT_DATA_FILE" + + jq -n "$EXPORT_MANIFEST | .untrustedData=\"$(basename -- "$EXPORT_DATA_UNTRUSTED_FILE")\"" > "$EXPORT_MANIFEST_FILE" + + echo "Successful swing-store export for block $EXPORT_HEIGHT" +)} + +restore_swing_store_snapshot() {( set -euo pipefail + rm -f $HOME/.agoric/data/agoric/swingstore.sqlite + EXPORT_DIR="$1" + shift + + /usr/src/agoric-sdk/packages/cosmic-swingset/src/import-kernel-db.js --home "$HOME/.agoric" --export-dir "$EXPORT_DIR" --verbose --artifact-mode replay --export-data-mode all "$@" +)} + +compare_swing_store_export_data() { + EXPORT_DIR="$1" + EXPORT_MANIFEST_FILE="$EXPORT_DIR/export-manifest.json" + EXPORT_DATA_FILE="$(cat "$EXPORT_MANIFEST_FILE" | jq -r .data)" + EXPORT_DATA_UNTRUSTED_FILE="$(cat "$EXPORT_MANIFEST_FILE" | jq -r .untrustedData)" + + if [ -z "$EXPORT_DATA_FILE" ]; then + echo "missing-export-data" + return + fi + + if [ -z "$EXPORT_DATA_UNTRUSTED_FILE" ]; then + echo "missing-untrusted-export-data" + return + fi + + diff <(cat "$EXPORT_DIR/$EXPORT_DATA_FILE" | sort) <(cat "$EXPORT_DIR/$EXPORT_DATA_UNTRUSTED_FILE" | sort) >&2 && { + echo "match" + } || { + echo "mismatch" + } +} diff --git 
a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/test.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/test.sh new file mode 100755 index 00000000000..35b279e25cf --- /dev/null +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/test.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +. ./upgrade-test-scripts/env_setup.sh + +echo Wait for actions to settle +waitForBlock 2 + +# CWD is agoric-sdk +upgrade11=./upgrade-test-scripts/agoric-upgrade-11 + +# verify swing-store export-data is consistent +killAgd +EXPORT_DIR=$(mktemp -t -d swing-store-export-upgrade-11-XXX) +make_swing_store_snapshot $EXPORT_DIR --artifact-mode none || fail "Couldn't make swing-store snapshot" +test_val "$(compare_swing_store_export_data $EXPORT_DIR)" "match" "swing-store consistent state-sync" +rm -rf $EXPORT_DIR +startAgd From 4cce97b5cd290b040de843431dab55570719f4bc Mon Sep 17 00:00:00 2001 From: Michael FIG Date: Sun, 30 Jul 2023 15:36:46 -0600 Subject: [PATCH 100/109] ci(test-docker-build): try making more free disk space --- .github/workflows/test-all-packages.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/.github/workflows/test-all-packages.yml b/.github/workflows/test-all-packages.yml index 0c752ada3db..a1a6c167974 100644 --- a/.github/workflows/test-all-packages.yml +++ b/.github/workflows/test-all-packages.yml @@ -569,6 +569,17 @@ jobs: matrix: bootstrap-version: ['test', 'main'] steps: + - name: free up additional worker space + run: | + # Workaround to provide additional free space for testing. 
+ # https://github.com/actions/virtual-environments/issues/2840 + df -h + sudo rm -rf /usr/share/dotnet + sudo rm -rf /opt/ghc + sudo rm -rf "/usr/local/share/boost" + sudo rm -rf "$AGENT_TOOLSDIRECTORY" + df -h + - uses: actions/checkout@v3 - name: docker build (sdk) run: cd packages/deployment && ./scripts/test-docker-build.sh | $TEST_COLLECT From 12e49bbc0acbae3b1d6cffd89e1d99a645b736ba Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 17 Aug 2023 21:52:27 +0000 Subject: [PATCH 101/109] refactor(x/swingset): explicit read/write swing-store export directory --- .../keeper/swing_store_exports_handler.go | 106 +++++++++++------- 1 file changed, 64 insertions(+), 42 deletions(-) diff --git a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go index 564db5de630..2cc7e83f588 100644 --- a/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go +++ b/golang/cosmos/x/swingset/keeper/swing_store_exports_handler.go @@ -638,19 +638,42 @@ func (exportsHandler SwingStoreExportsHandler) retrieveExport(onExportRetrieved defer os.RemoveAll(exportDir) - rawManifest, err := os.ReadFile(filepath.Join(exportDir, ExportManifestFilename)) + provider, err := OpenSwingStoreExportDirectory(exportDir) if err != nil { return err } - var manifest exportManifest - err = json.Unmarshal(rawManifest, &manifest) + if blockHeight != 0 && provider.BlockHeight != blockHeight { + return fmt.Errorf("export manifest blockHeight (%d) doesn't match (%d)", provider.BlockHeight, blockHeight) + } + + err = onExportRetrieved(provider) if err != nil { return err } - if blockHeight != 0 && manifest.BlockHeight != blockHeight { - return fmt.Errorf("export manifest blockHeight (%d) doesn't match (%d)", manifest.BlockHeight, blockHeight) + operationDetails.logger.Info("retrieved swing-store export", "exportDir", exportDir) + + return nil +} + +// OpenSwingStoreExportDirectory creates an export provider from a swing-store 
+// export saved on disk in the provided directory. It expects the export manifest +// to be present in that directory. The provider's function will read the +// export's data and artifacts from disk on demand. Each artifact is using a +// dedicated file, and the export data is read from a jsonl-like file, if any. +// The export manifest filename and overall export format is common with the JS +// swing-store import/export logic. +func OpenSwingStoreExportDirectory(exportDir string) (SwingStoreExportProvider, error) { + rawManifest, err := os.ReadFile(filepath.Join(exportDir, ExportManifestFilename)) + if err != nil { + return SwingStoreExportProvider{}, err + } + + var manifest exportManifest + err = json.Unmarshal(rawManifest, &manifest) + if err != nil { + return SwingStoreExportProvider{}, err } getExportDataReader := func() (agoric.KVEntryReader, error) { @@ -689,18 +712,7 @@ func (exportsHandler SwingStoreExportsHandler) retrieveExport(onExportRetrieved return artifact, err } - err = onExportRetrieved(SwingStoreExportProvider{BlockHeight: manifest.BlockHeight, GetExportDataReader: getExportDataReader, ReadNextArtifact: readNextArtifact}) - if err != nil { - return err - } - - // if nextArtifact != len(manifest.Artifacts) { - // return errors.New("not all export artifacts were retrieved") - // } - - operationDetails.logger.Info("retrieved swing-store export", "exportDir", exportDir) - - return nil + return SwingStoreExportProvider{BlockHeight: manifest.BlockHeight, GetExportDataReader: getExportDataReader, ReadNextArtifact: readNextArtifact}, nil } // RestoreExport restores the JS swing-store using previously exported data and artifacts. 
@@ -739,8 +751,41 @@ func (exportsHandler SwingStoreExportsHandler) RestoreExport(provider SwingStore } defer os.RemoveAll(exportDir) - manifest := exportManifest{ + err = WriteSwingStoreExportToDirectory(provider, exportDir) + if err != nil { + return err + } + + action := &swingStoreRestoreExportAction{ + Type: swingStoreExportActionType, BlockHeight: blockHeight, + Request: restoreRequest, + Args: [1]swingStoreImportOptions{{ + ExportDir: exportDir, + ArtifactMode: restoreOptions.ArtifactMode, + ExportDataMode: restoreOptions.ExportDataMode, + }}, + } + + _, err = exportsHandler.blockingSend(action, true) + if err != nil { + return err + } + + exportsHandler.logger.Info("restored swing-store export", "exportDir", exportDir, "height", blockHeight) + + return nil +} + +// WriteSwingStoreExportToDirectory consumes a provider and saves a swing-store +// export to disk in the provided directory. It creates files for each artifact +// deriving a filename from the artifact name, and stores any "export data" in +// a jsonl-like file, before saving the export manifest linking these together. +// The export manifest filename and overall export format is common with the JS +// swing-store import/export logic. 
+func WriteSwingStoreExportToDirectory(provider SwingStoreExportProvider, exportDir string) error { + manifest := exportManifest{ + BlockHeight: provider.BlockHeight, } exportDataReader, err := provider.GetExportDataReader() @@ -808,28 +853,5 @@ func (exportsHandler SwingStoreExportsHandler) RestoreExport(provider SwingStore if err != nil { return err } - err = writeExportFile(ExportManifestFilename, manifestBytes) - if err != nil { - return err - } - - action := &swingStoreRestoreExportAction{ - Type: swingStoreExportActionType, - BlockHeight: blockHeight, - Request: restoreRequest, - Args: [1]swingStoreImportOptions{{ - ExportDir: exportDir, - ArtifactMode: restoreOptions.ArtifactMode, - ExportDataMode: restoreOptions.ExportDataMode, - }}, - } - - _, err = exportsHandler.blockingSend(action, true) - if err != nil { - return err - } - - exportsHandler.logger.Info("restored swing-store export", "exportDir", exportDir, "height", blockHeight) - - return nil + return writeExportFile(ExportManifestFilename, manifestBytes) } From c530fdc72f33d24befb3bff5fe4c94adb0722554 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 21 Jul 2023 23:43:15 +0000 Subject: [PATCH 102/109] fix(cosmic-swingset): log level for swing-store export --- packages/cosmic-swingset/src/chain-main.js | 2 +- packages/cosmic-swingset/src/export-kernel-db.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/cosmic-swingset/src/chain-main.js b/packages/cosmic-swingset/src/chain-main.js index 8082db3dc67..287b23a324d 100644 --- a/packages/cosmic-swingset/src/chain-main.js +++ b/packages/cosmic-swingset/src/chain-main.js @@ -581,7 +581,7 @@ export default async function main(progname, args, { env, homedir, agcc }) { ); }); - console.info( + console.warn( 'Initiating SwingSet state snapshot at block height', blockHeight, 'with options', diff --git a/packages/cosmic-swingset/src/export-kernel-db.js b/packages/cosmic-swingset/src/export-kernel-db.js index 
2878658fe12..b9920f355b4 100755 --- a/packages/cosmic-swingset/src/export-kernel-db.js +++ b/packages/cosmic-swingset/src/export-kernel-db.js @@ -353,7 +353,7 @@ export const main = async ( { fs, pathResolve, - log: verbose ? console.log : null, + log: verbose ? console.warn : null, }, ); From fe4eb56facf83569aa343f098e97c6229556afa9 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 21 Jul 2023 23:43:15 +0000 Subject: [PATCH 103/109] feat(cosmos): spawn JS on export command Do not assume daemonization --- golang/cosmos/cmd/agd/main.go | 7 +++++-- golang/cosmos/daemon/cmd/root.go | 6 +++++- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/golang/cosmos/cmd/agd/main.go b/golang/cosmos/cmd/agd/main.go index 21f3a0db1ce..174ada874f3 100644 --- a/golang/cosmos/cmd/agd/main.go +++ b/golang/cosmos/cmd/agd/main.go @@ -13,7 +13,7 @@ import ( func main() { // We need to delegate to our default app for running the actual chain. - daemoncmd.OnStartHook = func(logger log.Logger) { + launchVM := func(logger log.Logger) { args := []string{"ag-chain-cosmos", "--home", gaia.DefaultNodeHome} args = append(args, os.Args[1:]...)
@@ -22,12 +22,15 @@ func main() { panic(lookErr) } - logger.Info("Start chain delegating to JS executable", "binary", binary, "args", args) + logger.Info("agd delegating to JS executable", "binary", binary, "args", args) execErr := syscall.Exec(binary, args, os.Environ()) if execErr != nil { panic(execErr) } } + daemoncmd.OnStartHook = launchVM + daemoncmd.OnExportHook = launchVM + daemon.RunWithController(nil) } diff --git a/golang/cosmos/daemon/cmd/root.go b/golang/cosmos/daemon/cmd/root.go index 8426cd6cacb..339b111ee3b 100644 --- a/golang/cosmos/daemon/cmd/root.go +++ b/golang/cosmos/daemon/cmd/root.go @@ -40,7 +40,8 @@ import ( type Sender func(needReply bool, str string) (string, error) var AppName = "agd" -var OnStartHook func(log.Logger) +var OnStartHook func(logger log.Logger) +var OnExportHook func(logger log.Logger) // NewRootCmd creates a new root command for simd. It is called once in the // main function. @@ -272,6 +273,9 @@ func (ac appCreator) appExport( jailAllowedAddrs []string, appOpts servertypes.AppOptions, ) (servertypes.ExportedApp, error) { + if OnExportHook != nil { + OnExportHook(logger) + } homePath, ok := appOpts.Get(flags.FlagHome).(string) if !ok || homePath == "" { From 8d2571c51c2fe08d630dd2897d7e5e1b45ab45c9 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 18 Aug 2023 20:06:10 +0000 Subject: [PATCH 104/109] feat(cosmos)!: add required export-dir export cmd option --- golang/cosmos/daemon/cmd/root.go | 64 ++++++++++++++++++- packages/cosmic-swingset/test/scenario2.js | 5 +- .../agoric-upgrade-11/actions.sh | 1 - .../agoric-upgrade-11/env_setup.sh | 11 ++-- 4 files changed, 73 insertions(+), 8 deletions(-) diff --git a/golang/cosmos/daemon/cmd/root.go b/golang/cosmos/daemon/cmd/root.go index 339b111ee3b..9fe90a0b018 100644 --- a/golang/cosmos/daemon/cmd/root.go +++ b/golang/cosmos/daemon/cmd/root.go @@ -134,6 +134,14 @@ func initRootCmd(sender Sender, rootCmd *cobra.Command, encodingConfig params.En } 
server.AddCommands(rootCmd, gaia.DefaultNodeHome, ac.newApp, ac.appExport, addModuleInitFlags) + hasVMController := sender != nil + for _, command := range rootCmd.Commands() { + if command.Name() == "export" { + extendCosmosExportCommand(command, hasVMController) + break + } + } + // add keybase, auxiliary RPC, query, and tx child commands rootCmd.AddCommand( rpc.StatusCommand(), @@ -233,7 +241,9 @@ func (ac appCreator) newApp( panic(err) } - snapshotDir := filepath.Join(cast.ToString(appOpts.Get(flags.FlagHome)), "data", "snapshots") + homePath := cast.ToString(appOpts.Get(flags.FlagHome)) + + snapshotDir := filepath.Join(homePath, "data", "snapshots") snapshotDB, err := sdk.NewLevelDB("metadata", snapshotDir) if err != nil { panic(err) @@ -246,7 +256,7 @@ func (ac appCreator) newApp( return gaia.NewAgoricApp( ac.sender, logger, db, traceStore, true, skipUpgradeHeights, - cast.ToString(appOpts.Get(flags.FlagHome)), + homePath, cast.ToUint(appOpts.Get(server.FlagInvCheckPeriod)), ac.encCfg, appOpts, @@ -264,6 +274,56 @@ func (ac appCreator) newApp( ) } +const ( + // FlagExportDir is the command-line flag for the "export" command specifying + // where the output of the export should be placed. + FlagExportDir = "export-dir" + // ExportedGenesisFileName is the file name used to save the genesis in the export-dir + ExportedGenesisFileName = "genesis.json" +) + +// extendCosmosExportCommand monkey-patches the "export" command added by +// cosmos-sdk to add a required "export-dir" command-line flag, and create the +// genesis export in the specified directory. 
+func extendCosmosExportCommand(cmd *cobra.Command, hasVMController bool) { + cmd.Flags().String(FlagExportDir, "", "The directory where to create the genesis export") + err := cmd.MarkFlagRequired(FlagExportDir) + if err != nil { + panic(err) + } + + originalRunE := cmd.RunE + + extendedRunE := func(cmd *cobra.Command, args []string) error { + exportDir, _ := cmd.Flags().GetString(FlagExportDir) + err := os.MkdirAll(exportDir, os.ModePerm) + if err != nil { + return err + } + + genesisPath := filepath.Join(exportDir, ExportedGenesisFileName) + + // This will fail is a genesis.json already exists in the export-dir + genesisFile, err := os.OpenFile(genesisPath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, os.ModePerm) + if err != nil { + return err + } + defer genesisFile.Close() + + cmd.SetOut(genesisFile) + + return originalRunE(cmd, args) + } + + // Only modify the command handler when we have a VM controller to handle + // the full export logic. Otherwise, appExport will just exec the VM program + // (OnExportHook), which will result in re-entering this flow with the VM + // controller set. 
+ if hasVMController { + cmd.RunE = extendedRunE + } +} + func (ac appCreator) appExport( logger log.Logger, db dbm.DB, diff --git a/packages/cosmic-swingset/test/scenario2.js b/packages/cosmic-swingset/test/scenario2.js index 1a54b80e9af..bc5656da90e 100644 --- a/packages/cosmic-swingset/test/scenario2.js +++ b/packages/cosmic-swingset/test/scenario2.js @@ -80,7 +80,10 @@ export const makeScenario2 = ({ pspawnMake, pspawnAgd, log }) => { return runMake(['scenario2-run-rosetta-ci'], { stdio: onlyStderr }); }, export: () => - pspawnAgd(['export', '--home=t1/n0'], { stdio: onlyStderr }).exit, + pspawnAgd( + ['export', '--home=t1/n0', '--export-dir=t1/n0/genesis-export'], + { stdio: onlyStderr }, + ).exit, }); }; diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh index c405782ea93..50bee9bb93b 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh @@ -18,7 +18,6 @@ mv -n $HOME/.agoric/data/agoric/swing-store-historical-artifacts/* $EXPORT_DIR | mv $EXPORT_DIR/export-manifest.json $EXPORT_DIR/export-manifest-original.json cat $EXPORT_DIR/export-manifest-original.json | jq -r ".artifacts = .artifacts + [${HISTORICAL_ARTIFACTS%%,}] | del(.artifactMode)" > $EXPORT_DIR/export-manifest.json restore_swing_store_snapshot $EXPORT_DIR || fail "Couldn't restore swing-store snapshot" -rmdir $HOME/.agoric/data/agoric/swing-store-historical-artifacts rm -rf $EXPORT_DIR startAgd diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh index 0faaf3a66b5..4ab804dcd9e 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh +++ 
b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh @@ -86,14 +86,16 @@ pushPriceOnce () { } export_genesis() { - HEIGHT_ARG= + GENESIS_EXPORT_DIR="$1" + shift + GENESIS_HEIGHT_ARG= if [ -n "$1" ]; then - HEIGHT_ARG="--height $1" + GENESIS_HEIGHT_ARG="--height $1" shift fi - agd export $HEIGHT_ARG "$@" + agd export --export-dir "$GENESIS_EXPORT_DIR" $GENESIS_HEIGHT_ARG "$@" } make_swing_store_snapshot() {( set -euo pipefail @@ -108,7 +110,8 @@ make_swing_store_snapshot() {( set -euo pipefail EXPORT_MANIFEST="$(cat $EXPORT_MANIFEST_FILE)" mv "$EXPORT_DATA_FILE" "$EXPORT_DATA_UNTRUSTED_FILE" - export_genesis $EXPORT_HEIGHT | jq -cr '.app_state.swingset.swing_store_export_data[] | [.key,.value]' > "$EXPORT_DATA_FILE" + export_genesis "$EXPORT_DIR/genesis-export" $EXPORT_HEIGHT + cat $EXPORT_DIR/genesis-export/genesis.json | jq -cr '.app_state.swingset.swing_store_export_data[] | [.key,.value]' > "$EXPORT_DATA_FILE" jq -n "$EXPORT_MANIFEST | .untrustedData=\"$(basename -- "$EXPORT_DATA_UNTRUSTED_FILE")\"" > "$EXPORT_MANIFEST_FILE" From e5f9425e74c7235323cd6b1b88540b73b57a69a6 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Fri, 21 Jul 2023 23:43:15 +0000 Subject: [PATCH 105/109] feat(x/swingset): export swing store in genesis --- golang/cosmos/app/app.go | 11 +++- golang/cosmos/daemon/cmd/root.go | 20 ++++++- golang/cosmos/x/swingset/genesis.go | 56 ++++++++++++++++--- golang/cosmos/x/swingset/module.go | 29 +++++++--- .../agoric-upgrade-11/actions.sh | 3 +- .../agoric-upgrade-11/env_setup.sh | 17 +++--- 6 files changed, 109 insertions(+), 27 deletions(-) diff --git a/golang/cosmos/app/app.go b/golang/cosmos/app/app.go index 82db55ed54f..f5932d90c95 100644 --- a/golang/cosmos/app/app.go +++ b/golang/cosmos/app/app.go @@ -126,6 +126,14 @@ import ( const appName = "agoric" +// FlagSwingStoreExportDir defines the config flag used to specify where a +// genesis swing-store export is expected. 
For start from genesis, the default +// value is config/swing-store in the home directory. For genesis export, the +// value is always a "swing-store" directory sibling to the exported +// genesis.json file. +// TODO: document this flag in config, likely alongside the genesis path +const FlagSwingStoreExportDir = "swing-store-export-dir" + var ( // DefaultNodeHome default home directories for the application daemon DefaultNodeHome string @@ -588,6 +596,7 @@ func NewAgoricApp( app.EvidenceKeeper = *evidenceKeeper skipGenesisInvariants := cast.ToBool(appOpts.Get(crisis.FlagSkipGenesisInvariants)) + swingStoreExportDir := cast.ToString(appOpts.Get(FlagSwingStoreExportDir)) // NOTE: Any module instantiated in the module manager that is later modified // must be passed by reference here. @@ -617,7 +626,7 @@ func NewAgoricApp( transferModule, icaModule, vstorage.NewAppModule(app.VstorageKeeper), - swingset.NewAppModule(app.SwingSetKeeper, setBootstrapNeeded, app.ensureControllerInited), + swingset.NewAppModule(app.SwingSetKeeper, &app.SwingStoreExportsHandler, setBootstrapNeeded, app.ensureControllerInited, swingStoreExportDir), vibcModule, vbankModule, lienModule, diff --git a/golang/cosmos/daemon/cmd/root.go b/golang/cosmos/daemon/cmd/root.go index 9fe90a0b018..95c6c1f2433 100644 --- a/golang/cosmos/daemon/cmd/root.go +++ b/golang/cosmos/daemon/cmd/root.go @@ -276,10 +276,15 @@ func (ac appCreator) newApp( const ( // FlagExportDir is the command-line flag for the "export" command specifying - // where the output of the export should be placed. + // where the output of the export should be placed. 
It contains both the + // items names below: the genesis file, and a directory containing the + // exported swing-store artifacts FlagExportDir = "export-dir" // ExportedGenesisFileName is the file name used to save the genesis in the export-dir ExportedGenesisFileName = "genesis.json" + // ExportedSwingStoreDirectoryName is the directory name used to save the swing-store + // export (artifacts only) in the export-dir + ExportedSwingStoreDirectoryName = "swing-store" ) // extendCosmosExportCommand monkey-patches the "export" command added by @@ -295,6 +300,8 @@ func extendCosmosExportCommand(cmd *cobra.Command, hasVMController bool) { originalRunE := cmd.RunE extendedRunE := func(cmd *cobra.Command, args []string) error { + serverCtx := server.GetServerContextFromCmd(cmd) + exportDir, _ := cmd.Flags().GetString(FlagExportDir) err := os.MkdirAll(exportDir, os.ModePerm) if err != nil { @@ -302,6 +309,17 @@ func extendCosmosExportCommand(cmd *cobra.Command, hasVMController bool) { } genesisPath := filepath.Join(exportDir, ExportedGenesisFileName) + swingStoreExportPath := filepath.Join(exportDir, ExportedSwingStoreDirectoryName) + + err = os.MkdirAll(swingStoreExportPath, os.ModePerm) + if err != nil { + return err + } + // We unconditionally set FlagSwingStoreExportDir as for export, it makes + // little sense for users to control this location separately, and we don't + // want to override any swing-store artifacts that may be associated to the + // current genesis. 
+ serverCtx.Viper.Set(gaia.FlagSwingStoreExportDir, swingStoreExportPath) // This will fail is a genesis.json already exists in the export-dir genesisFile, err := os.OpenFile(genesisPath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, os.ModePerm) diff --git a/golang/cosmos/x/swingset/genesis.go b/golang/cosmos/x/swingset/genesis.go index 5b27c603b70..1334da697bd 100644 --- a/golang/cosmos/x/swingset/genesis.go +++ b/golang/cosmos/x/swingset/genesis.go @@ -4,6 +4,8 @@ import ( // "os" "fmt" + agoric "github.com/Agoric/agoric-sdk/golang/cosmos/types" + "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/keeper" "github.com/Agoric/agoric-sdk/golang/cosmos/x/swingset/types" sdk "github.com/cosmos/cosmos-sdk/types" ) @@ -42,19 +44,13 @@ func InitGenesis(ctx sdk.Context, keeper Keeper, data *types.GenesisState) bool return true } -func ExportGenesis(ctx sdk.Context, k Keeper) *types.GenesisState { +func ExportGenesis(ctx sdk.Context, k Keeper, swingStoreExportsHandler *SwingStoreExportsHandler, swingStoreExportDir string) *types.GenesisState { gs := &types.GenesisState{ Params: k.GetParams(ctx), State: k.GetState(ctx), SwingStoreExportData: []*types.SwingStoreExportDataEntry{}, } - // Only export the swing-store shadow copy for now - // TODO: - // - perform state-sync export with check blockHeight (figure out how to - // handle export of historical height), - // - include swing-store artifacts in genesis state - // See https://github.com/Agoric/agoric-sdk/issues/6527 exportDataIterator := k.GetSwingStore(ctx).Iterator(nil, nil) defer exportDataIterator.Close() for ; exportDataIterator.Valid(); exportDataIterator.Next() { @@ -64,5 +60,51 @@ func ExportGenesis(ctx sdk.Context, k Keeper) *types.GenesisState { } gs.SwingStoreExportData = append(gs.SwingStoreExportData, &entry) } + + snapshotHeight := uint64(ctx.BlockHeight()) + + err := swingStoreExportsHandler.InitiateExport( + // The export will fail if the export of a historical height was requested + snapshotHeight, + 
swingStoreGenesisEventHandler{exportDir: swingStoreExportDir, snapshotHeight: snapshotHeight}, + // The export will fail if the swing-store does not contain all replay artifacts + keeper.SwingStoreExportOptions{ + ArtifactMode: keeper.SwingStoreArtifactModeReplay, + ExportDataMode: keeper.SwingStoreExportDataModeSkip, + }, + ) + if err != nil { + panic(err) + } + + err = keeper.WaitUntilSwingStoreExportDone() + if err != nil { + panic(err) + } + return gs } + +type swingStoreGenesisEventHandler struct { + exportDir string + snapshotHeight uint64 +} + +func (eventHandler swingStoreGenesisEventHandler) OnExportStarted(height uint64, retrieveSwingStoreExport func() error) error { + return retrieveSwingStoreExport() +} + +func (eventHandler swingStoreGenesisEventHandler) OnExportRetrieved(provider keeper.SwingStoreExportProvider) error { + if eventHandler.snapshotHeight != provider.BlockHeight { + return fmt.Errorf("snapshot block height (%d) doesn't match requested height (%d)", provider.BlockHeight, eventHandler.snapshotHeight) + } + + artifactsProvider := keeper.SwingStoreExportProvider{ + GetExportDataReader: func() (agoric.KVEntryReader, error) { + return nil, nil + }, + ReadNextArtifact: provider.ReadNextArtifact, + } + + return keeper.WriteSwingStoreExportToDirectory(artifactsProvider, eventHandler.exportDir) +} diff --git a/golang/cosmos/x/swingset/module.go b/golang/cosmos/x/swingset/module.go index a5f180beba4..42b207dab38 100644 --- a/golang/cosmos/x/swingset/module.go +++ b/golang/cosmos/x/swingset/module.go @@ -80,18 +80,22 @@ func (AppModuleBasic) GetTxCmd() *cobra.Command { type AppModule struct { AppModuleBasic - keeper Keeper - setBootstrapNeeded func() - ensureControllerInited func(sdk.Context) + keeper Keeper + swingStoreExportsHandler *SwingStoreExportsHandler + setBootstrapNeeded func() + ensureControllerInited func(sdk.Context) + swingStoreExportDir string } // NewAppModule creates a new AppModule Object -func NewAppModule(k Keeper, 
setBootstrapNeeded func(), ensureControllerInited func(sdk.Context)) AppModule { +func NewAppModule(k Keeper, swingStoreExportsHandler *SwingStoreExportsHandler, setBootstrapNeeded func(), ensureControllerInited func(sdk.Context), swingStoreExportDir string) AppModule { am := AppModule{ - AppModuleBasic: AppModuleBasic{}, - keeper: k, - setBootstrapNeeded: setBootstrapNeeded, - ensureControllerInited: ensureControllerInited, + AppModuleBasic: AppModuleBasic{}, + keeper: k, + swingStoreExportsHandler: swingStoreExportsHandler, + setBootstrapNeeded: setBootstrapNeeded, + ensureControllerInited: ensureControllerInited, + swingStoreExportDir: swingStoreExportDir, } return am } @@ -150,6 +154,12 @@ func (am AppModule) EndBlock(ctx sdk.Context, req abci.RequestEndBlock) []abci.V return []abci.ValidatorUpdate{} } +func (am AppModule) checkSwingStoreExportSetup() { + if am.swingStoreExportDir == "" { + panic(fmt.Errorf("SwingStore export dir not set")) + } +} + func (am AppModule) InitGenesis(ctx sdk.Context, cdc codec.JSONCodec, data json.RawMessage) []abci.ValidatorUpdate { var genesisState types.GenesisState cdc.MustUnmarshalJSON(data, &genesisState) @@ -161,6 +171,7 @@ func (am AppModule) InitGenesis(ctx sdk.Context, cdc codec.JSONCodec, data json. 
} func (am AppModule) ExportGenesis(ctx sdk.Context, cdc codec.JSONCodec) json.RawMessage { - gs := ExportGenesis(ctx, am.keeper) + am.checkSwingStoreExportSetup() + gs := ExportGenesis(ctx, am.keeper, am.swingStoreExportsHandler, am.swingStoreExportDir) return cdc.MustMarshalJSON(gs) } diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh index 50bee9bb93b..9d5987b8ef6 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh @@ -11,8 +11,7 @@ upgrade11=./upgrade-test-scripts/agoric-upgrade-11 # hacky restore of pruned artifacts killAgd EXPORT_DIR=$(mktemp -t -d swing-store-export-upgrade-11-XXX) -make_swing_store_snapshot $EXPORT_DIR --artifact-mode debug || fail "Couldn't make swing-store snapshot" -test_val "$(compare_swing_store_export_data $EXPORT_DIR)" "match" "swing-store export data" +WITHOUT_GENESIS_EXPORT=1 make_swing_store_snapshot $EXPORT_DIR --artifact-mode debug || fail "Couldn't make swing-store snapshot" HISTORICAL_ARTIFACTS="$(cd $HOME/.agoric/data/agoric/swing-store-historical-artifacts/; for i in *; do echo -n "[\"$i\",\"$i\"],"; done)" mv -n $HOME/.agoric/data/agoric/swing-store-historical-artifacts/* $EXPORT_DIR || fail "some historical artifacts not pruned" mv $EXPORT_DIR/export-manifest.json $EXPORT_DIR/export-manifest-original.json diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh index 4ab804dcd9e..3a9537719f1 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/env_setup.sh @@ -104,16 +104,19 @@ make_swing_store_snapshot() {( set -euo 
pipefail /usr/src/agoric-sdk/packages/cosmic-swingset/src/export-kernel-db.js --home "$HOME/.agoric" --export-dir "$EXPORT_DIR" --verbose --artifact-mode replay --export-data-mode all "$@" EXPORT_MANIFEST_FILE="$EXPORT_DIR/export-manifest.json" - EXPORT_DATA_FILE="$EXPORT_DIR/$(cat "$EXPORT_MANIFEST_FILE" | jq -r .data)" - EXPORT_DATA_UNTRUSTED_FILE="${EXPORT_DATA_FILE%.*}-untrusted.jsonl" EXPORT_HEIGHT=$(cat "$EXPORT_MANIFEST_FILE" | jq -r .blockHeight) - EXPORT_MANIFEST="$(cat $EXPORT_MANIFEST_FILE)" + + [ "x${WITHOUT_GENESIS_EXPORT:-0}" = "x1" ] || { + EXPORT_DATA_FILE="$EXPORT_DIR/$(cat "$EXPORT_MANIFEST_FILE" | jq -r .data)" + EXPORT_DATA_UNTRUSTED_FILE="${EXPORT_DATA_FILE%.*}-untrusted.jsonl" + EXPORT_MANIFEST="$(cat $EXPORT_MANIFEST_FILE)" - mv "$EXPORT_DATA_FILE" "$EXPORT_DATA_UNTRUSTED_FILE" - export_genesis "$EXPORT_DIR/genesis-export" $EXPORT_HEIGHT - cat $EXPORT_DIR/genesis-export/genesis.json | jq -cr '.app_state.swingset.swing_store_export_data[] | [.key,.value]' > "$EXPORT_DATA_FILE" + mv "$EXPORT_DATA_FILE" "$EXPORT_DATA_UNTRUSTED_FILE" + export_genesis "$EXPORT_DIR/genesis-export" $EXPORT_HEIGHT + cat $EXPORT_DIR/genesis-export/genesis.json | jq -cr '.app_state.swingset.swing_store_export_data[] | [.key,.value]' > "$EXPORT_DATA_FILE" - jq -n "$EXPORT_MANIFEST | .untrustedData=\"$(basename -- "$EXPORT_DATA_UNTRUSTED_FILE")\"" > "$EXPORT_MANIFEST_FILE" + jq -n "$EXPORT_MANIFEST | .untrustedData=\"$(basename -- "$EXPORT_DATA_UNTRUSTED_FILE")\"" > "$EXPORT_MANIFEST_FILE" + } echo "Successful swing-store export for block $EXPORT_HEIGHT" )} From db1f0e62159fe594823ba5f248991c82d4d2775b Mon Sep 17 00:00:00 2001 From: Ikenna Omekam Date: Wed, 16 Aug 2023 19:31:31 -0400 Subject: [PATCH 106/109] test: add tests from agoric-upgrade-10 to agoric-upgrade-11 --- .../agoric-upgrade-11/actions.sh | 26 ++++++++++++++++ .../agoric-upgrade-11/pre_test.sh | 30 +++++++++++++++++++ 2 files changed, 56 insertions(+) create mode 100755 
packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/pre_test.sh diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh index f29d64f6e65..c405782ea93 100644 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/actions.sh @@ -21,3 +21,29 @@ restore_swing_store_snapshot $EXPORT_DIR || fail "Couldn't restore swing-store s rmdir $HOME/.agoric/data/agoric/swing-store-historical-artifacts rm -rf $EXPORT_DIR startAgd + +test_not_val "$(agops vaults list --from $GOV1ADDR)" "" "gov1 has no vaults" + +# open up a vault +OFFER=$(mktemp -t agops.XXX) +agops vaults open --wantMinted 7.00 --giveCollateral 11.0 >|"$OFFER" +agops perf satisfaction --from "$GOV1ADDR" --executeOffer "$OFFER" --keyring-backend=test + +# put some IST in +OFFER=$(mktemp -t agops.XXX) +agops vaults adjust --vaultId vault3 --giveMinted 1.5 --from $GOV1ADDR --keyring-backend=test >|"$OFFER" +agops perf satisfaction --from "$GOV1ADDR" --executeOffer "$OFFER" --keyring-backend=test + +# add some collateral +OFFER=$(mktemp -t agops.XXX) +agops vaults adjust --vaultId vault3 --giveCollateral 2.0 --from $GOV1ADDR --keyring-backend="test" >|"$OFFER" +agops perf satisfaction --from "$GOV1ADDR" --executeOffer "$OFFER" --keyring-backend=test + +# close out +OFFER=$(mktemp -t agops.XXX) +agops vaults close --vaultId vault3 --giveMinted 5.75 --from $GOV1ADDR --keyring-backend="test" >|"$OFFER" +agops perf satisfaction --from "$GOV1ADDR" --executeOffer "$OFFER" --keyring-backend=test + +test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults.vault3 -o jsonlines | jq -r '.vaultState') "closed" "vault3 is closed" +test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults.vault3 -o jsonlines | jq -r '.locked.value') "0" 
"vault3 contains no collateral" +test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults.vault3 -o jsonlines | jq -r '.debtSnapshot.debt.value') "0" "vault3 has no debt" diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/pre_test.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/pre_test.sh new file mode 100755 index 00000000000..1db5c34bbdc --- /dev/null +++ b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/pre_test.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +. ./upgrade-test-scripts/env_setup.sh + +echo Wait for upgrade to settle +waitForBlock 5 + +# CWD is agoric-sdk +upgrade11=./upgrade-test-scripts/agoric-upgrade-11 + +# validate agoric-upgrade-10 metrics after update + +test_val $(agd q vstorage children published.vaultFactory.managers.manager0.vaults -o json | jq -r '.children | length') 3 "we have three vaults" +test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.metrics -o jsonlines | jq -r '.numActiveVaults') 1 "only one vault is active" + +test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.metrics -o jsonlines | jq -r '.totalDebt.value') "6030000" "totalDebt is correct" +test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.metrics -o jsonlines | jq -r '.totalCollateral.value') "8000000" "totalCollateral is correct" + +test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults.vault0 -o jsonlines | jq -r '.vaultState') "active" "vault0 is open" +test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults.vault0 -o jsonlines | jq -r '.locked.value') "8000000" "vault0 contains 8 ATOM collateral" +test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults.vault0 -o jsonlines | jq -r '.debtSnapshot.debt.value') "6030000" "vault0 debt is 6.03 IST" + +test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults.vault1 -o 
jsonlines | jq -r '.vaultState') "closed" "vault1 is closed" +test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults.vault1 -o jsonlines | jq -r '.locked.value') "0" "vault1 contains no collateral" +test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults.vault1 -o jsonlines | jq -r '.debtSnapshot.debt.value') "0" "vault1 has no debt" + +test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults.vault2 -o jsonlines | jq -r '.vaultState') "closed" "vault2 is closed" +test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults.vault2 -o jsonlines | jq -r '.locked.value') "0" "vault2 contains no collateral" +test_val $(agoric follow -l -F :published.vaultFactory.managers.manager0.vaults.vault2 -o jsonlines | jq -r '.debtSnapshot.debt.value') "0" "vault2 has no debt" + From 2446cf43bb13aad7de0805cd7e33c966d2e31016 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Sat, 22 Jul 2023 00:16:08 +0000 Subject: [PATCH 107/109] feat(x/swingset): import swing store from genesis state --- golang/cosmos/daemon/cmd/root.go | 8 +++++ golang/cosmos/x/swingset/genesis.go | 47 ++++++++++++++++++++++++----- golang/cosmos/x/swingset/module.go | 3 +- 3 files changed, 49 insertions(+), 9 deletions(-) diff --git a/golang/cosmos/daemon/cmd/root.go b/golang/cosmos/daemon/cmd/root.go index 95c6c1f2433..35a59ceb101 100644 --- a/golang/cosmos/daemon/cmd/root.go +++ b/golang/cosmos/daemon/cmd/root.go @@ -28,6 +28,7 @@ import ( genutilcli "github.com/cosmos/cosmos-sdk/x/genutil/client/cli" "github.com/spf13/cast" "github.com/spf13/cobra" + "github.com/spf13/viper" tmcli "github.com/tendermint/tendermint/libs/cli" "github.com/tendermint/tendermint/libs/log" dbm "github.com/tendermint/tm-db" @@ -243,6 +244,13 @@ func (ac appCreator) newApp( homePath := cast.ToString(appOpts.Get(flags.FlagHome)) + // Set a default value for FlagSwingStoreExportDir based on the homePath + // in case we need to InitGenesis with 
swing-store data + viper, ok := appOpts.(*viper.Viper) + if ok && cast.ToString(appOpts.Get(gaia.FlagSwingStoreExportDir)) == "" { + viper.Set(gaia.FlagSwingStoreExportDir, filepath.Join(homePath, "config", ExportedSwingStoreDirectoryName)) + } + snapshotDir := filepath.Join(homePath, "data", "snapshots") snapshotDB, err := sdk.NewLevelDB("metadata", snapshotDir) if err != nil { diff --git a/golang/cosmos/x/swingset/genesis.go b/golang/cosmos/x/swingset/genesis.go index 1334da697bd..a738e1497c7 100644 --- a/golang/cosmos/x/swingset/genesis.go +++ b/golang/cosmos/x/swingset/genesis.go @@ -30,18 +30,49 @@ func DefaultGenesisState() *types.GenesisState { // InitGenesis initializes the (Cosmos-side) SwingSet state from the GenesisState. // Returns whether the app should send a bootstrap action to the controller. -func InitGenesis(ctx sdk.Context, keeper Keeper, data *types.GenesisState) bool { - keeper.SetParams(ctx, data.GetParams()) - keeper.SetState(ctx, data.GetState()) +func InitGenesis(ctx sdk.Context, k Keeper, swingStoreExportsHandler *SwingStoreExportsHandler, swingStoreExportDir string, data *types.GenesisState) bool { + k.SetParams(ctx, data.GetParams()) + k.SetState(ctx, data.GetState()) swingStoreExportData := data.GetSwingStoreExportData() - if len(swingStoreExportData) > 0 { - // See https://github.com/Agoric/agoric-sdk/issues/6527 - panic("genesis with swing-store state not implemented") + if len(swingStoreExportData) == 0 { + return true } - // TODO: bootstrap only if not restoring swing-store from genesis state - return true + artifactProvider, err := keeper.OpenSwingStoreExportDirectory(swingStoreExportDir) + if err != nil { + panic(err) + } + + swingStore := k.GetSwingStore(ctx) + + for _, entry := range swingStoreExportData { + swingStore.Set([]byte(entry.Key), []byte(entry.Value)) + } + + snapshotHeight := uint64(ctx.BlockHeight()) + + getExportDataReader := func() (agoric.KVEntryReader, error) { + exportDataIterator := swingStore.Iterator(nil, 
nil) + return agoric.NewKVIteratorReader(exportDataIterator), nil + } + + err = swingStoreExportsHandler.RestoreExport( + keeper.SwingStoreExportProvider{ + BlockHeight: snapshotHeight, + GetExportDataReader: getExportDataReader, + ReadNextArtifact: artifactProvider.ReadNextArtifact, + }, + keeper.SwingStoreRestoreOptions{ + ArtifactMode: keeper.SwingStoreArtifactModeReplay, + ExportDataMode: keeper.SwingStoreExportDataModeAll, + }, + ) + if err != nil { + panic(err) + } + + return false } func ExportGenesis(ctx sdk.Context, k Keeper, swingStoreExportsHandler *SwingStoreExportsHandler, swingStoreExportDir string) *types.GenesisState { diff --git a/golang/cosmos/x/swingset/module.go b/golang/cosmos/x/swingset/module.go index 42b207dab38..ec6f3b4fd45 100644 --- a/golang/cosmos/x/swingset/module.go +++ b/golang/cosmos/x/swingset/module.go @@ -163,7 +163,8 @@ func (am AppModule) checkSwingStoreExportSetup() { func (am AppModule) InitGenesis(ctx sdk.Context, cdc codec.JSONCodec, data json.RawMessage) []abci.ValidatorUpdate { var genesisState types.GenesisState cdc.MustUnmarshalJSON(data, &genesisState) - bootstrapNeeded := InitGenesis(ctx, am.keeper, &genesisState) + am.checkSwingStoreExportSetup() + bootstrapNeeded := InitGenesis(ctx, am.keeper, am.swingStoreExportsHandler, am.swingStoreExportDir, &genesisState) if bootstrapNeeded { am.setBootstrapNeeded() } From 4223dee4a4173ebc3a32a7615a2cbfe3522fe3e7 Mon Sep 17 00:00:00 2001 From: Mathieu Hofman Date: Thu, 27 Jul 2023 18:43:32 +0000 Subject: [PATCH 108/109] feat(deployment): add genesis export test --- .../upgrade-test-scripts/agoric-upgrade-11/test.sh | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/test.sh b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/test.sh index 35b279e25cf..86dcbf2d057 100755 --- a/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/test.sh +++ 
b/packages/deployment/upgrade-test/upgrade-test-scripts/agoric-upgrade-11/test.sh @@ -8,10 +8,18 @@ waitForBlock 2 # CWD is agoric-sdk upgrade11=./upgrade-test-scripts/agoric-upgrade-11 -# verify swing-store export-data is consistent +# verify swing-store export-data is consistent and perform genesis style "upgrade" killAgd EXPORT_DIR=$(mktemp -t -d swing-store-export-upgrade-11-XXX) make_swing_store_snapshot $EXPORT_DIR --artifact-mode none || fail "Couldn't make swing-store snapshot" -test_val "$(compare_swing_store_export_data $EXPORT_DIR)" "match" "swing-store consistent state-sync" +test_val "$(compare_swing_store_export_data $EXPORT_DIR)" "match" "swing-store consistent cosmos kvstore" + +TMP_GENESIS_DIR=$EXPORT_DIR/genesis-export +cp $HOME/.agoric/config/genesis.json $TMP_GENESIS_DIR/old_genesis.json +cp $HOME/.agoric/data/priv_validator_state.json $TMP_GENESIS_DIR/priv_validator_state.json +rm -rf $HOME/.agoric/data +mkdir $HOME/.agoric/data +mv $TMP_GENESIS_DIR/priv_validator_state.json $HOME/.agoric/data +mv $TMP_GENESIS_DIR/* $HOME/.agoric/config/ rm -rf $EXPORT_DIR startAgd From e886a60677212ff14f792d02de60f3e10e981c7b Mon Sep 17 00:00:00 2001 From: Brian Warner Date: Wed, 23 Aug 2023 16:58:30 -0700 Subject: [PATCH 109/109] chore: patch @lerna/conventional-commits to avoid version bump for prereleases (cherry-picked from commit b1917d65e5544d73c9db6af9896dc3bd5d19891e from trunk, contained in PR #8243) We used to patch this library to reduce the `releaseType` level for our packages that had not yet reached 1.0 . We then switched to an upstream version which performed this reduction itself, except not when doing a "prerelease". Now that we need to do a prerelease, we'd like this reduction behavior in both arms of the conditional, so this commit re-introduces our original patch. 
refs #8242 --- .../@lerna+conventional-commits+3.22.0.patch | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 patches/@lerna+conventional-commits+3.22.0.patch diff --git a/patches/@lerna+conventional-commits+3.22.0.patch b/patches/@lerna+conventional-commits+3.22.0.patch new file mode 100644 index 00000000000..49a69ff814a --- /dev/null +++ b/patches/@lerna+conventional-commits+3.22.0.patch @@ -0,0 +1,20 @@ +diff --git a/node_modules/@lerna/conventional-commits/lib/recommend-version.js b/node_modules/@lerna/conventional-commits/lib/recommend-version.js +index f524f9d..a9b5427 100644 +--- a/node_modules/@lerna/conventional-commits/lib/recommend-version.js ++++ b/node_modules/@lerna/conventional-commits/lib/recommend-version.js +@@ -53,6 +53,15 @@ function recommendVersion(pkg, type, { changelogPreset, rootPath, tagPrefix, pre + // we still need to bump _something_ because lerna saw a change here + let releaseType = data.releaseType || "patch"; + ++ // Don't gratuitously break compatibility with clients using `^0.x.y`. ++ if (semver.major(pkg.version) === 0) { ++ if (releaseType === "major") { ++ releaseType = "minor"; ++ } else if (releaseType === "minor") { ++ releaseType = "patch"; ++ } ++ } ++ + if (prereleaseId) { + const shouldBump = shouldBumpPrerelease(releaseType, pkg.version); + const prereleaseType = shouldBump ? `pre${releaseType}` : "prerelease";