From eaf233ba1bc2dbd6f9b27da1e995a4ee153add55 Mon Sep 17 00:00:00 2001 From: Sneha Agnihotri <180277+snehaagni@users.noreply.github.com> Date: Mon, 8 Apr 2024 14:40:09 -0700 Subject: [PATCH 01/10] update package.json version (#12745) Signed-off-by: Sneha Agnihotri --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index f6efe6844d9..7ee2aaba64b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "chainlink", - "version": "2.9.0", + "version": "2.10.0", "description": "node of the decentralized oracle network, bridging on and off-chain computation", "main": "index.js", "private": true, From 401d126a6331cbf39c7187c8fd4f7c7140f1b25f Mon Sep 17 00:00:00 2001 From: frank zhu Date: Mon, 8 Apr 2024 15:46:39 -0700 Subject: [PATCH 02/10] remove VERSION file to use version from package.json (#12663) * remove VERSION file to use version from package.json * update chainlink.Dockerfile * add jq to dockerfile * update where jq is installed --------- Co-authored-by: Sneha Agnihotri <180277+snehaagni@users.noreply.github.com> --- .github/actions/version-file-bump/action.yml | 23 ++------------------ GNUmakefile | 2 +- README.md | 2 +- VERSION | 1 - core/chainlink.Dockerfile | 4 +++- plugins/chainlink.Dockerfile | 2 +- tools/bin/goreleaser_wrapper | 2 +- tools/bin/ldflags | 2 +- 8 files changed, 10 insertions(+), 28 deletions(-) delete mode 100644 VERSION diff --git a/.github/actions/version-file-bump/action.yml b/.github/actions/version-file-bump/action.yml index 2875234cf17..050ce0153f1 100644 --- a/.github/actions/version-file-bump/action.yml +++ b/.github/actions/version-file-bump/action.yml @@ -28,7 +28,7 @@ runs: id: get-current-version shell: bash run: | - current_version=$(head -n1 ./VERSION) + current_version=$(jq -r '.version' ./package.json) echo "current_version=${current_version}" | tee -a "$GITHUB_OUTPUT" - name: Compare semantic versions uses: 
smartcontractkit/chainlink-github-actions/semver-compare@7882cf348cd6a1f6bcf1ee8280185584ebba96e9 # v2.3.10 @@ -37,20 +37,6 @@ runs: version1: ${{ steps.get-current-version.outputs.current_version }} operator: eq version2: ${{ steps.get-latest-version.outputs.latest_version }} - # The follow two steps are temp until we migrate to use version from package.json as the source of truth - - name: Get package version - id: get-package-version - shell: bash - run: | - package_version=$(jq -r '.version' ./package.json) - echo "package_version=${package_version}" | tee -a "$GITHUB_OUTPUT" - - name: Diff versions - uses: smartcontractkit/chainlink-github-actions/semver-compare@7882cf348cd6a1f6bcf1ee8280185584ebba96e9 # v2.3.10 - id: diff - with: - version1: ${{ steps.get-current-version.outputs.current_version }} - operator: eq - version2: ${{ steps.get-package-version.outputs.package_version }} - name: Fail if version not bumped # XXX: The reason we are not checking if the current is greater than the # latest release is to account for hot fixes which may have been branched @@ -58,13 +44,8 @@ runs: shell: bash env: VERSION_NOT_BUMPED: ${{ steps.compare.outputs.result }} - VERSION_SAME: ${{ steps.diff.outputs.result }} run: | if [[ "${VERSION_NOT_BUMPED:-}" = "true" ]]; then - echo "Version file not bumped since last release. Please bump the ./VERSION file in the root of the repo and commit the change." - exit 1 - fi - if [[ "${VERSION_SAME:-}" = "false" ]]; then - echo "The version in the VERSION file is not the same as the version in package.json file. Please fix by running `pnpm changeset version`." + echo "The version in `package.json` has not bumped since the last release. Please fix by running `pnpm changeset version`." 
exit 1 fi diff --git a/GNUmakefile b/GNUmakefile index 6e61563316e..40c86ed3b62 100644 --- a/GNUmakefile +++ b/GNUmakefile @@ -1,7 +1,7 @@ .DEFAULT_GOAL := chainlink COMMIT_SHA ?= $(shell git rev-parse HEAD) -VERSION = $(shell cat VERSION) +VERSION = $(shell jq -r '.version' package.json) GO_LDFLAGS := $(shell tools/bin/ldflags) GOFLAGS = -ldflags "$(GO_LDFLAGS)" diff --git a/README.md b/README.md index 82bb8e0b755..882f408050a 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ [![GitHub tag (latest SemVer)](https://img.shields.io/github/v/tag/smartcontractkit/chainlink?style=flat-square)](https://hub.docker.com/r/smartcontract/chainlink/tags) [![GitHub license](https://img.shields.io/github/license/smartcontractkit/chainlink?style=flat-square)](https://github.com/smartcontractkit/chainlink/blob/master/LICENSE) -[![GitHub workflow changelog](https://img.shields.io/github/workflow/status/smartcontractkit/chainlink/Changelog?style=flat-square&label=github-actions)](https://github.com/smartcontractkit/chainlink/actions?query=workflow%3AChangelog) +[![GitHub workflow changeset](https://img.shields.io/github/actions/workflow/status/smartcontractkit/chainlink/changeset.yml)](https://github.com/smartcontractkit/chainlink/actions/workflows/changeset.yml?query=workflow%3AChangeset) [![GitHub contributors](https://img.shields.io/github/contributors-anon/smartcontractkit/chainlink?style=flat-square)](https://github.com/smartcontractkit/chainlink/graphs/contributors) [![GitHub commit activity](https://img.shields.io/github/commit-activity/y/smartcontractkit/chainlink?style=flat-square)](https://github.com/smartcontractkit/chainlink/commits/master) [![Official documentation](https://img.shields.io/static/v1?label=docs&message=latest&color=blue)](https://docs.chain.link/) diff --git a/VERSION b/VERSION deleted file mode 100644 index 10c2c0c3d62..00000000000 --- a/VERSION +++ /dev/null @@ -1 +0,0 @@ -2.10.0 diff --git a/core/chainlink.Dockerfile b/core/chainlink.Dockerfile 
index f992ee76166..e82a4cd662c 100644 --- a/core/chainlink.Dockerfile +++ b/core/chainlink.Dockerfile @@ -3,7 +3,7 @@ FROM golang:1.21-bullseye as buildgo RUN go version WORKDIR /chainlink -COPY GNUmakefile VERSION ./ +COPY GNUmakefile package.json ./ COPY tools/bin/ldflags ./tools/bin/ ADD go.mod go.sum ./ @@ -14,6 +14,8 @@ ARG COMMIT_SHA COPY . . +RUN apt-get update && apt-get install -y jq + # Build the golang binary RUN make install-chainlink diff --git a/plugins/chainlink.Dockerfile b/plugins/chainlink.Dockerfile index 4d1e2f4e6df..a7e9706435c 100644 --- a/plugins/chainlink.Dockerfile +++ b/plugins/chainlink.Dockerfile @@ -3,7 +3,7 @@ FROM golang:1.21-bullseye as buildgo RUN go version WORKDIR /chainlink -COPY GNUmakefile VERSION ./ +COPY GNUmakefile package.json ./ COPY tools/bin/ldflags ./tools/bin/ ADD go.mod go.sum ./ diff --git a/tools/bin/goreleaser_wrapper b/tools/bin/goreleaser_wrapper index f31f7baf593..d4b16d1c549 100755 --- a/tools/bin/goreleaser_wrapper +++ b/tools/bin/goreleaser_wrapper @@ -37,5 +37,5 @@ ZIG_FLAGS_DARWIN="-isysroot$macos_sdk_dir \ -iwithsysroot$include_search_path \ -mmacosx-version-min=11.7.1" \ ZIG_EXEC=$(which zig) \ -CHAINLINK_VERSION=$(cat VERSION) \ +CHAINLINK_VERSION=$(jq -r '.version' package.json) \ goreleaser "$@" diff --git a/tools/bin/ldflags b/tools/bin/ldflags index e7d72818b96..a9e41f23e30 100755 --- a/tools/bin/ldflags +++ b/tools/bin/ldflags @@ -3,6 +3,6 @@ cd "$(dirname "$0")" COMMIT_SHA=${COMMIT_SHA:-$(git rev-parse HEAD)} -VERSION=${VERSION:-$(cat "../../VERSION")} +VERSION=${VERSION:-$(jq -r '.version' ../../package.json)} echo "-X github.com/smartcontractkit/chainlink/v2/core/static.Version=$VERSION -X github.com/smartcontractkit/chainlink/v2/core/static.Sha=$COMMIT_SHA" From a3d5276e2cf79b5df6f344b04c015661bf257c55 Mon Sep 17 00:00:00 2001 From: Bartek Tofel Date: Tue, 9 Apr 2024 09:43:10 +0200 Subject: [PATCH 03/10] Automatic gas in load tests (#12416) * use experimental seth, use dynamic gas also when 
sending funds * fix a situation, when we lose transaction timeout setting for networks that are not overwritten * go mod tidy * add gas limit for Fiji, fix a situation when new networks were ignored * update Seth version * fix lints * fix lints * newer seth * newer Seth * use transfer fee not gas limit for sending funds, modify defaults for missing networks * use latest Seth that uses block headers not entire blocks * try new Seth config; more refund logic * use latest seth, fix fund return issue where if a retry was used, only funds from 1st node were returned * do not return, but continue, if one node has less balance than tx costs on return * go mod tidy * validate seth config before creating k8s env in ocr soak test, better default tx timeout * init seth client, before starting ocr soak test * fix compile errors * go mod * use latest seth * couple of renames and streamlines * use Seth Network urls if provided, otherwise take url from evmnetwork * testconfig will now correctly use custom EVMNetwork * latest Seth; set gas limit to 0 in TOMLs, so that it can be estimated by the node; use urls_secret for Seth Network when set, otherwise use WS endpoints from EVMNetwork; update default TOMLs with new values * go mod * skip funds return for given CL node if balance is 0 * latest seth * gomodtidy + latest seth * fix load compile --------- Co-authored-by: davidcauchi --- integration-tests/actions/seth/actions.go | 72 ++++++++--- integration-tests/actions/seth/refund.go | 82 +++++++++---- integration-tests/chaos/ocr_chaos_test.go | 3 +- .../docker/test_env/test_env_builder.go | 10 +- integration-tests/experiments/gas_test.go | 39 ++++++ integration-tests/go.mod | 4 +- integration-tests/go.sum | 8 +- integration-tests/load/functions/setup.go | 7 +- integration-tests/load/go.mod | 4 +- integration-tests/load/go.sum | 8 +- integration-tests/load/ocr/ocr_test.go | 10 +- integration-tests/soak/ocr_test.go | 16 +++ integration-tests/testconfig/default.toml | 116
++++++++++++++++-- integration-tests/testconfig/testconfig.go | 1 + .../testconfig/testconfig_utils.go | 4 +- integration-tests/testsetups/ocr.go | 4 +- integration-tests/utils/seth.go | 41 +++---- 17 files changed, 322 insertions(+), 107 deletions(-) create mode 100644 integration-tests/experiments/gas_test.go diff --git a/integration-tests/actions/seth/actions.go b/integration-tests/actions/seth/actions.go index d4bcbc7d867..d8daba3dbc3 100644 --- a/integration-tests/actions/seth/actions.go +++ b/integration-tests/actions/seth/actions.go @@ -97,12 +97,17 @@ type FundsToSendPayload struct { ToAddress common.Address Amount *big.Int PrivateKey *ecdsa.PrivateKey - GasLimit *uint64 + GasLimit *int64 + GasPrice *big.Int + GasFeeCap *big.Int + GasTipCap *big.Int + TxTimeout *time.Duration } // TODO: move to CTF? // SendFunds sends native token amount (expressed in human-scale) from address controlled by private key -// to given address. If no gas limit is set, then network's default will be used. +// to given address. You can override any or none of the following: gas limit, gas price, gas fee cap, gas tip cap. +// Values that are not set will be estimated or taken from config. 
func SendFunds(logger zerolog.Logger, client *seth.Client, payload FundsToSendPayload) (*types.Receipt, error) { fromAddress, err := privateKeyToAddress(payload.PrivateKey) if err != nil { @@ -117,38 +122,75 @@ func SendFunds(logger zerolog.Logger, client *seth.Client, payload FundsToSendPa } gasLimit := uint64(client.Cfg.Network.TransferGasFee) + gasPrice := big.NewInt(0) + gasFeeCap := big.NewInt(0) + gasTipCap := big.NewInt(0) + if payload.GasLimit != nil { - gasLimit = *payload.GasLimit + gasLimit = uint64(*payload.GasLimit) } - var signedTx *types.Transaction + if client.Cfg.Network.EIP1559DynamicFees { + // if any of the dynamic fees are not set, we need to either estimate them or read them from config + if payload.GasFeeCap == nil || payload.GasTipCap == nil { + // estimatior or config reading happens here + txOptions := client.NewTXOpts(seth.WithGasLimit(gasLimit)) + gasFeeCap = txOptions.GasFeeCap + gasTipCap = txOptions.GasTipCap + } + + // override with payload values if they are set + if payload.GasFeeCap != nil { + gasFeeCap = payload.GasFeeCap + } + + if payload.GasTipCap != nil { + gasTipCap = payload.GasTipCap + } + } + + if !client.Cfg.Network.EIP1559DynamicFees { + if payload.GasPrice == nil { + txOptions := client.NewTXOpts((seth.WithGasLimit(gasLimit))) + gasPrice = txOptions.GasPrice + } else { + gasPrice = payload.GasPrice + } + } + + var rawTx types.TxData if client.Cfg.Network.EIP1559DynamicFees { - rawTx := &types.DynamicFeeTx{ + rawTx = &types.DynamicFeeTx{ Nonce: nonce, To: &payload.ToAddress, Value: payload.Amount, Gas: gasLimit, - GasFeeCap: big.NewInt(client.Cfg.Network.GasFeeCap), - GasTipCap: big.NewInt(client.Cfg.Network.GasTipCap), + GasFeeCap: gasFeeCap, + GasTipCap: gasTipCap, } - signedTx, err = types.SignNewTx(payload.PrivateKey, types.NewLondonSigner(big.NewInt(client.ChainID)), rawTx) } else { - rawTx := &types.LegacyTx{ + rawTx = &types.LegacyTx{ Nonce: nonce, To: &payload.ToAddress, Value: payload.Amount, Gas: gasLimit, - 
GasPrice: big.NewInt(client.Cfg.Network.GasPrice), + GasPrice: gasPrice, } - signedTx, err = types.SignNewTx(payload.PrivateKey, types.NewEIP155Signer(big.NewInt(client.ChainID)), rawTx) } + signedTx, err := types.SignNewTx(payload.PrivateKey, types.LatestSignerForChainID(big.NewInt(client.ChainID)), rawTx) + if err != nil { return nil, errors.Wrap(err, "failed to sign tx") } - ctx, cancel = context.WithTimeout(ctx, client.Cfg.Network.TxnTimeout.Duration()) + txTimeout := client.Cfg.Network.TxnTimeout.Duration() + if payload.TxTimeout != nil { + txTimeout = *payload.TxTimeout + } + + ctx, cancel = context.WithTimeout(ctx, txTimeout) defer cancel() err = client.Client.SendTransaction(ctx, signedTx) if err != nil { @@ -162,9 +204,9 @@ func SendFunds(logger zerolog.Logger, client *seth.Client, payload FundsToSendPa Str("Amount", conversions.WeiToEther(payload.Amount).String()). Uint64("Nonce", nonce). Uint64("Gas Limit", gasLimit). - Int64("Gas Price", client.Cfg.Network.GasPrice). - Int64("Gas Fee Cap", client.Cfg.Network.GasFeeCap). - Int64("Gas Tip Cap", client.Cfg.Network.GasTipCap). + Str("Gas Price", gasPrice.String()). + Str("Gas Fee Cap", gasFeeCap.String()). + Str("Gas Tip Cap", gasTipCap.String()). Bool("Dynamic fees", client.Cfg.Network.EIP1559DynamicFees). 
Msg("Sent funds") diff --git a/integration-tests/actions/seth/refund.go b/integration-tests/actions/seth/refund.go index 4b267ffeeb9..9a32d22de5d 100644 --- a/integration-tests/actions/seth/refund.go +++ b/integration-tests/actions/seth/refund.go @@ -9,6 +9,7 @@ import ( "regexp" "strconv" "strings" + "time" "github.com/ethereum/go-ethereum/accounts/keystore" "github.com/ethereum/go-ethereum/common" @@ -19,7 +20,7 @@ import ( "github.com/smartcontractkit/chainlink-testing-framework/blockchain" - "github.com/smartcontractkit/chainlink/integration-tests/client" + clClient "github.com/smartcontractkit/chainlink/integration-tests/client" "github.com/smartcontractkit/chainlink/integration-tests/contracts" ) @@ -103,7 +104,7 @@ func (r *InsufficientFundTransferRetrier) Retry(ctx context.Context, logger zero // by doubling the gas limit and retrying until reaching maxGasLimit type GasTooLowTransferRetrier struct { nextRetrier TransactionRetrier - maxGasLimit uint64 + maxGasLimit int64 } func (r *GasTooLowTransferRetrier) Retry(ctx context.Context, logger zerolog.Logger, client *seth.Client, txErr error, payload FundsToSendPayload, currentAttempt int) error { @@ -120,18 +121,18 @@ func (r *GasTooLowTransferRetrier) Retry(ctx context.Context, logger zerolog.Log for txErr != nil && strings.Contains(txErr.Error(), GasTooLowErr) { logger.Info(). Msg("Too low gas error detected, retrying with more gas") - var newGasLimit uint64 + var newGasLimit int64 if payload.GasLimit != nil { newGasLimit = *payload.GasLimit * 2 } else { - newGasLimit = uint64(client.Cfg.Network.TransferGasFee) * 2 + newGasLimit = client.Cfg.Network.TransferGasFee * 2 } logger.Debug(). Str("retier", "GasTooLowTransferRetrier"). - Uint64("old gas limit", newGasLimit/2). - Uint64("new gas limit", newGasLimit). - Uint64("diff", newGasLimit). + Int64("old gas limit", newGasLimit/2). + Int64("new gas limit", newGasLimit). + Int64("diff", newGasLimit). 
Msg("New gas limit to use") payload.GasLimit = &newGasLimit @@ -231,13 +232,13 @@ func (r *OvershotTransferRetrier) Retry(ctx context.Context, logger zerolog.Logg // ReturnFunds returns funds from the given chainlink nodes to the default network wallet. It will use a variety // of strategies to attempt to return funds, including retrying with less funds if the transaction fails due to // insufficient funds, and retrying with a higher gas limit if the transaction fails due to gas too low. -func ReturnFunds(log zerolog.Logger, seth *seth.Client, chainlinkNodes []contracts.ChainlinkNodeWithKeysAndAddress) error { - if seth == nil { +func ReturnFunds(log zerolog.Logger, sethClient *seth.Client, chainlinkNodes []contracts.ChainlinkNodeWithKeysAndAddress) error { + if sethClient == nil { return fmt.Errorf("Seth client is nil, unable to return funds from chainlink nodes") } log.Info().Msg("Attempting to return Chainlink node funds to default network wallets") - if seth.Cfg.IsSimulatedNetwork() { - log.Info().Str("Network Name", seth.Cfg.Network.Name). + if sethClient.Cfg.IsSimulatedNetwork() { + log.Info().Str("Network Name", sethClient.Cfg.Network.Name). Msg("Network is a simulated network. Skipping fund return.") return nil } @@ -245,7 +246,7 @@ func ReturnFunds(log zerolog.Logger, seth *seth.Client, chainlinkNodes []contrac failedReturns := []common.Address{} for _, chainlinkNode := range chainlinkNodes { - fundedKeys, err := chainlinkNode.ExportEVMKeysForChain(fmt.Sprint(seth.ChainID)) + fundedKeys, err := chainlinkNode.ExportEVMKeysForChain(fmt.Sprint(sethClient.ChainID)) if err != nil { return err } @@ -256,7 +257,7 @@ func ReturnFunds(log zerolog.Logger, seth *seth.Client, chainlinkNodes []contrac } // This can take up a good bit of RAM and time. When running on the remote-test-runner, this can lead to OOM // issues. So we avoid running in parallel; slower, but safer. 
- decryptedKey, err := keystore.DecryptKey(keyToDecrypt, client.ChainlinkKeyPassword) + decryptedKey, err := keystore.DecryptKey(keyToDecrypt, clClient.ChainlinkKeyPassword) if err != nil { return err } @@ -268,24 +269,48 @@ func ReturnFunds(log zerolog.Logger, seth *seth.Client, chainlinkNodes []contrac } fromAddress := crypto.PubkeyToAddress(*publicKeyECDSA) - balance, err := seth.Client.BalanceAt(context.Background(), fromAddress, nil) + balance, err := sethClient.Client.BalanceAt(context.Background(), fromAddress, nil) if err != nil { return err } - var totalGasCost *big.Int - if seth.Cfg.Network.EIP1559DynamicFees { - totalGasCost = new(big.Int).Mul(big.NewInt(0).SetInt64(seth.Cfg.Network.TransferGasFee), big.NewInt(0).SetInt64(seth.Cfg.Network.GasFeeCap)) + if balance.Cmp(big.NewInt(0)) == 0 { + log.Info(). + Str("Address", fromAddress.String()). + Msg("No balance to return. Skipping return.") + } + + // if not set, it will be just set to empty string, which is okay as long as gas estimation is disabled + txPriority := sethClient.Cfg.Network.GasEstimationTxPriority + txTimeout := sethClient.Cfg.Network.TxnTimeout.Duration() + + if sethClient.Cfg.IsExperimentEnabled(seth.Experiment_SlowFundsReturn) { + txPriority = "slow" + thirtyMinutes := time.Duration(30 * time.Minute) + txTimeout = thirtyMinutes + } + + estimations := sethClient.CalculateGasEstimations(seth.GasEstimationRequest{ + GasEstimationEnabled: sethClient.Cfg.Network.GasEstimationEnabled, + FallbackGasPrice: sethClient.Cfg.Network.GasPrice, + FallbackGasFeeCap: sethClient.Cfg.Network.GasFeeCap, + FallbackGasTipCap: sethClient.Cfg.Network.GasTipCap, + Priority: txPriority, + }) + + var maxTotalGasCost *big.Int + if sethClient.Cfg.Network.EIP1559DynamicFees { + maxTotalGasCost = new(big.Int).Mul(big.NewInt(0).SetInt64(sethClient.Cfg.Network.TransferGasFee), estimations.GasFeeCap) } else { - totalGasCost = new(big.Int).Mul(big.NewInt(0).SetInt64(seth.Cfg.Network.TransferGasFee), 
big.NewInt(0).SetInt64(seth.Cfg.Network.GasPrice)) + maxTotalGasCost = new(big.Int).Mul(big.NewInt(0).SetInt64(sethClient.Cfg.Network.TransferGasFee), estimations.GasPrice) } - toSend := new(big.Int).Sub(balance, totalGasCost) + toSend := new(big.Int).Sub(balance, maxTotalGasCost) if toSend.Cmp(big.NewInt(0)) <= 0 { log.Warn(). Str("Address", fromAddress.String()). - Str("Estimated total cost", totalGasCost.String()). + Str("Estimated maximum total gas cost", maxTotalGasCost.String()). Str("Balance", balance.String()). Str("To send", toSend.String()). Msg("Not enough balance to cover gas cost. Skipping return.") @@ -294,12 +319,21 @@ func ReturnFunds(log zerolog.Logger, seth *seth.Client, chainlinkNodes []contrac continue } - payload := FundsToSendPayload{ToAddress: seth.Addresses[0], Amount: toSend, PrivateKey: decryptedKey.PrivateKey} + payload := FundsToSendPayload{ + ToAddress: sethClient.Addresses[0], + Amount: toSend, + PrivateKey: decryptedKey.PrivateKey, + GasLimit: &sethClient.Cfg.Network.TransferGasFee, + GasPrice: estimations.GasPrice, + GasFeeCap: estimations.GasFeeCap, + GasTipCap: estimations.GasTipCap, + TxTimeout: &txTimeout, + } - _, err = SendFunds(log, seth, payload) + _, err = SendFunds(log, sethClient, payload) if err != nil { - handler := OvershotTransferRetrier{maxRetries: 10, nextRetrier: &InsufficientFundTransferRetrier{maxRetries: 10, nextRetrier: &GasTooLowTransferRetrier{maxGasLimit: uint64(seth.Cfg.Network.TransferGasFee * 10)}}} - err = handler.Retry(context.Background(), log, seth, err, payload, 0) + handler := OvershotTransferRetrier{maxRetries: 10, nextRetrier: &InsufficientFundTransferRetrier{maxRetries: 10, nextRetrier: &GasTooLowTransferRetrier{maxGasLimit: sethClient.Cfg.Network.TransferGasFee * 10}}} + err = handler.Retry(context.Background(), log, sethClient, err, payload, 0) if err != nil { log.Error(). Err(err). 
diff --git a/integration-tests/chaos/ocr_chaos_test.go b/integration-tests/chaos/ocr_chaos_test.go index 6e397694cc9..d2e7c540db5 100644 --- a/integration-tests/chaos/ocr_chaos_test.go +++ b/integration-tests/chaos/ocr_chaos_test.go @@ -171,7 +171,8 @@ func TestOCRChaos(t *testing.T) { network := networks.MustGetSelectedNetworkConfig(cfg.GetNetworkConfig())[0] network = utils.MustReplaceSimulatedNetworkUrlWithK8(l, network, *testEnvironment) - sethCfg := utils.MergeSethAndEvmNetworkConfigs(l, network, *readSethCfg) + sethCfg, err := utils.MergeSethAndEvmNetworkConfigs(network, *readSethCfg) + require.NoError(t, err, "Error merging seth and evm network configs") err = utils.ValidateSethNetworkConfig(sethCfg.Network) require.NoError(t, err, "Error validating seth network config") seth, err := seth.NewClientWithConfig(&sethCfg) diff --git a/integration-tests/docker/test_env/test_env_builder.go b/integration-tests/docker/test_env/test_env_builder.go index 8b1f22137f7..224c364b2d8 100644 --- a/integration-tests/docker/test_env/test_env_builder.go +++ b/integration-tests/docker/test_env/test_env_builder.go @@ -322,7 +322,10 @@ func (b *CLTestEnvBuilder) Build() (*CLClusterTestEnv, error) { if b.hasSeth { readSethCfg := b.testConfig.GetSethConfig() - sethCfg := utils.MergeSethAndEvmNetworkConfigs(b.l, networkConfig, *readSethCfg) + sethCfg, err := utils.MergeSethAndEvmNetworkConfigs(networkConfig, *readSethCfg) + if err != nil { + return nil, err + } err = utils.ValidateSethNetworkConfig(sethCfg.Network) if err != nil { return nil, err @@ -421,7 +424,10 @@ func (b *CLTestEnvBuilder) Build() (*CLClusterTestEnv, error) { if b.hasSeth { b.te.sethClients = make(map[int64]*seth.Client) readSethCfg := b.testConfig.GetSethConfig() - sethCfg := utils.MergeSethAndEvmNetworkConfigs(b.l, networkConfig, *readSethCfg) + sethCfg, err := utils.MergeSethAndEvmNetworkConfigs(networkConfig, *readSethCfg) + if err != nil { + return nil, err + } err = 
utils.ValidateSethNetworkConfig(sethCfg.Network) if err != nil { return nil, err diff --git a/integration-tests/experiments/gas_test.go b/integration-tests/experiments/gas_test.go new file mode 100644 index 00000000000..b3ca8e53a25 --- /dev/null +++ b/integration-tests/experiments/gas_test.go @@ -0,0 +1,39 @@ +package experiments + +import ( + "testing" + "time" + + "github.com/smartcontractkit/seth" + "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink-testing-framework/logging" + "github.com/smartcontractkit/chainlink-testing-framework/networks" + "github.com/smartcontractkit/chainlink/integration-tests/contracts" + tc "github.com/smartcontractkit/chainlink/integration-tests/testconfig" + "github.com/smartcontractkit/chainlink/integration-tests/utils" +) + +func TestGasExperiment(t *testing.T) { + l := logging.GetTestLogger(t) + config, err := tc.GetConfig("Soak", tc.OCR) + require.NoError(t, err, "Error getting config") + + network := networks.MustGetSelectedNetworkConfig(config.GetNetworkConfig())[0] + readSethCfg := config.GetSethConfig() + require.NotNil(t, readSethCfg, "Seth config shouldn't be nil") + + sethCfg, err := utils.MergeSethAndEvmNetworkConfigs(network, *readSethCfg) + require.NoError(t, err, "Error merging seth and evm network configs") + err = utils.ValidateSethNetworkConfig(sethCfg.Network) + require.NoError(t, err, "Error validating seth network config") + + seth, err := seth.NewClientWithConfig(&sethCfg) + require.NoError(t, err, "Error creating seth client") + + for i := 0; i < 1; i++ { + _, err = contracts.DeployLinkTokenContract(l, seth) + require.NoError(t, err, "Error deploying LINK contract") + time.Sleep(2 * time.Second) + } +} diff --git a/integration-tests/go.mod b/integration-tests/go.mod index 2bc01df21f7..81d6a805abd 100644 --- a/integration-tests/go.mod +++ b/integration-tests/go.mod @@ -29,7 +29,7 @@ require ( github.com/smartcontractkit/chainlink-vrf v0.0.0-20231120191722-fef03814f868 
github.com/smartcontractkit/chainlink/v2 v2.0.0-00010101000000-000000000000 github.com/smartcontractkit/libocr v0.0.0-20240326191951-2bbe9382d052 - github.com/smartcontractkit/seth v0.1.2 + github.com/smartcontractkit/seth v0.1.3 github.com/smartcontractkit/wasp v0.4.5 github.com/spf13/cobra v1.8.0 github.com/stretchr/testify v1.9.0 @@ -337,8 +337,6 @@ require ( github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f // indirect github.com/mwitkow/grpc-proxy v0.0.0-20230212185441-f345521cb9c9 // indirect - github.com/naoina/go-stringutil v0.1.0 // indirect - github.com/naoina/toml v0.1.2-0.20170918210437-9fafd6967416 // indirect github.com/oklog/run v1.1.0 // indirect github.com/oklog/ulid v1.3.1 // indirect github.com/olekukonko/tablewriter v0.0.5 // indirect diff --git a/integration-tests/go.sum b/integration-tests/go.sum index 658a77c4f04..16af04e283c 100644 --- a/integration-tests/go.sum +++ b/integration-tests/go.sum @@ -1299,10 +1299,6 @@ github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8m github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/naoina/go-stringutil v0.1.0 h1:rCUeRUHjBjGTSHl0VC00jUPLz8/F9dDzYI70Hzifhks= -github.com/naoina/go-stringutil v0.1.0/go.mod h1:XJ2SJL9jCtBh+P9q5btrd/Ylo8XwT/h1USek5+NqSA0= -github.com/naoina/toml v0.1.2-0.20170918210437-9fafd6967416 h1:shk/vn9oCoOTmwcouEdwIeOtOGA/ELRUw/GwvxwfT+0= -github.com/naoina/toml v0.1.2-0.20170918210437-9fafd6967416/go.mod h1:NBIhNtsFMo3G2szEBne+bO4gS192HuIYRqfvOWb4i1E= github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= 
github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w= github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= @@ -1547,8 +1543,8 @@ github.com/smartcontractkit/grpc-proxy v0.0.0-20230731113816-f1be6620749f h1:hgJ github.com/smartcontractkit/grpc-proxy v0.0.0-20230731113816-f1be6620749f/go.mod h1:MvMXoufZAtqExNexqi4cjrNYE9MefKddKylxjS+//n0= github.com/smartcontractkit/libocr v0.0.0-20240326191951-2bbe9382d052 h1:1WFjrrVrWoQ9UpVMh7Mx4jDpzhmo1h8hFUKd9awIhIU= github.com/smartcontractkit/libocr v0.0.0-20240326191951-2bbe9382d052/go.mod h1:SJEZCHgMCAzzBvo9vMV2DQ9onfEcIJCYSViyP4JI6c4= -github.com/smartcontractkit/seth v0.1.2 h1:ImXJmniuq6yWB6b3eezjV+lkYb1GfQuaJkwRvrCfTKQ= -github.com/smartcontractkit/seth v0.1.2/go.mod h1:aOaGwrIVFG/MYaLSj9UUMyE5QJnYQoAgnxm5cKfT9Ng= +github.com/smartcontractkit/seth v0.1.3 h1:pQc+SJeONWg73lQOiY5ZmBbvvVqEVBmTM9PiJOr+n4s= +github.com/smartcontractkit/seth v0.1.3/go.mod h1:2TMOZQ8WTAw7rR1YBbXpnad6VmT/+xDd/nXLmB7Eero= github.com/smartcontractkit/tdh2/go/ocr2/decryptionplugin v0.0.0-20230906073235-9e478e5e19f1 h1:yiKnypAqP8l0OX0P3klzZ7SCcBUxy5KqTAKZmQOvSQE= github.com/smartcontractkit/tdh2/go/ocr2/decryptionplugin v0.0.0-20230906073235-9e478e5e19f1/go.mod h1:q6f4fe39oZPdsh1i57WznEZgxd8siidMaSFq3wdPmVg= github.com/smartcontractkit/tdh2/go/tdh2 v0.0.0-20230906073235-9e478e5e19f1 h1:Dai1bn+Q5cpeGMQwRdjOdVjG8mmFFROVkSKuUgBErRQ= diff --git a/integration-tests/load/functions/setup.go b/integration-tests/load/functions/setup.go index a6c80279bb9..4e353ff93a9 100644 --- a/integration-tests/load/functions/setup.go +++ b/integration-tests/load/functions/setup.go @@ -53,8 +53,11 @@ type S4SecretsCfg struct { func SetupLocalLoadTestEnv(globalConfig tc.GlobalTestConfig, functionsConfig types.FunctionsTestConfig) (*FunctionsTest, error) { selectedNetwork := networks.MustGetSelectedNetworkConfig(globalConfig.GetNetworkConfig())[0] readSethCfg := globalConfig.GetSethConfig() - sethCfg := 
utils.MergeSethAndEvmNetworkConfigs(log.Logger, selectedNetwork, *readSethCfg) - err := utils.ValidateSethNetworkConfig(sethCfg.Network) + sethCfg, err := utils.MergeSethAndEvmNetworkConfigs(selectedNetwork, *readSethCfg) + if err != nil { + return nil, err + } + err = utils.ValidateSethNetworkConfig(sethCfg.Network) if err != nil { return nil, err } diff --git a/integration-tests/load/go.mod b/integration-tests/load/go.mod index e08143040be..4c7c2a1367a 100644 --- a/integration-tests/load/go.mod +++ b/integration-tests/load/go.mod @@ -21,7 +21,7 @@ require ( github.com/smartcontractkit/chainlink/integration-tests v0.0.0-20240214231432-4ad5eb95178c github.com/smartcontractkit/chainlink/v2 v2.9.0-beta0.0.20240216210048-da02459ddad8 github.com/smartcontractkit/libocr v0.0.0-20240326191951-2bbe9382d052 - github.com/smartcontractkit/seth v0.1.2 + github.com/smartcontractkit/seth v0.1.3 github.com/smartcontractkit/tdh2/go/tdh2 v0.0.0-20230906073235-9e478e5e19f1 github.com/smartcontractkit/wasp v0.4.6 github.com/stretchr/testify v1.9.0 @@ -323,8 +323,6 @@ require ( github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f // indirect github.com/mwitkow/grpc-proxy v0.0.0-20230212185441-f345521cb9c9 // indirect - github.com/naoina/go-stringutil v0.1.0 // indirect - github.com/naoina/toml v0.1.2-0.20170918210437-9fafd6967416 // indirect github.com/oklog/run v1.1.0 // indirect github.com/oklog/ulid v1.3.1 // indirect github.com/olekukonko/tablewriter v0.0.5 // indirect diff --git a/integration-tests/load/go.sum b/integration-tests/load/go.sum index a5c62d390e1..c58a91e9197 100644 --- a/integration-tests/load/go.sum +++ b/integration-tests/load/go.sum @@ -1282,10 +1282,6 @@ github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8m github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= 
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/naoina/go-stringutil v0.1.0 h1:rCUeRUHjBjGTSHl0VC00jUPLz8/F9dDzYI70Hzifhks= -github.com/naoina/go-stringutil v0.1.0/go.mod h1:XJ2SJL9jCtBh+P9q5btrd/Ylo8XwT/h1USek5+NqSA0= -github.com/naoina/toml v0.1.2-0.20170918210437-9fafd6967416 h1:shk/vn9oCoOTmwcouEdwIeOtOGA/ELRUw/GwvxwfT+0= -github.com/naoina/toml v0.1.2-0.20170918210437-9fafd6967416/go.mod h1:NBIhNtsFMo3G2szEBne+bO4gS192HuIYRqfvOWb4i1E= github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w= github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= @@ -1532,8 +1528,8 @@ github.com/smartcontractkit/grpc-proxy v0.0.0-20230731113816-f1be6620749f h1:hgJ github.com/smartcontractkit/grpc-proxy v0.0.0-20230731113816-f1be6620749f/go.mod h1:MvMXoufZAtqExNexqi4cjrNYE9MefKddKylxjS+//n0= github.com/smartcontractkit/libocr v0.0.0-20240326191951-2bbe9382d052 h1:1WFjrrVrWoQ9UpVMh7Mx4jDpzhmo1h8hFUKd9awIhIU= github.com/smartcontractkit/libocr v0.0.0-20240326191951-2bbe9382d052/go.mod h1:SJEZCHgMCAzzBvo9vMV2DQ9onfEcIJCYSViyP4JI6c4= -github.com/smartcontractkit/seth v0.1.2 h1:ImXJmniuq6yWB6b3eezjV+lkYb1GfQuaJkwRvrCfTKQ= -github.com/smartcontractkit/seth v0.1.2/go.mod h1:aOaGwrIVFG/MYaLSj9UUMyE5QJnYQoAgnxm5cKfT9Ng= +github.com/smartcontractkit/seth v0.1.3 h1:pQc+SJeONWg73lQOiY5ZmBbvvVqEVBmTM9PiJOr+n4s= +github.com/smartcontractkit/seth v0.1.3/go.mod h1:2TMOZQ8WTAw7rR1YBbXpnad6VmT/+xDd/nXLmB7Eero= github.com/smartcontractkit/tdh2/go/ocr2/decryptionplugin v0.0.0-20230906073235-9e478e5e19f1 h1:yiKnypAqP8l0OX0P3klzZ7SCcBUxy5KqTAKZmQOvSQE= github.com/smartcontractkit/tdh2/go/ocr2/decryptionplugin v0.0.0-20230906073235-9e478e5e19f1/go.mod 
h1:q6f4fe39oZPdsh1i57WznEZgxd8siidMaSFq3wdPmVg= github.com/smartcontractkit/tdh2/go/tdh2 v0.0.0-20230906073235-9e478e5e19f1 h1:Dai1bn+Q5cpeGMQwRdjOdVjG8mmFFROVkSKuUgBErRQ= diff --git a/integration-tests/load/ocr/ocr_test.go b/integration-tests/load/ocr/ocr_test.go index 0a06206e60d..a1388280e55 100644 --- a/integration-tests/load/ocr/ocr_test.go +++ b/integration-tests/load/ocr/ocr_test.go @@ -9,10 +9,10 @@ import ( "github.com/smartcontractkit/wasp" "github.com/smartcontractkit/chainlink-testing-framework/logging" - tc "github.com/smartcontractkit/chainlink/integration-tests/testconfig" - "github.com/smartcontractkit/chainlink/integration-tests/utils" "github.com/smartcontractkit/chainlink/integration-tests/k8s" + tc "github.com/smartcontractkit/chainlink/integration-tests/testconfig" + "github.com/smartcontractkit/chainlink/integration-tests/utils" ) var ( @@ -34,7 +34,8 @@ func TestOCRLoad(t *testing.T) { readSethCfg := config.GetSethConfig() require.NotNil(t, readSethCfg, "Seth config shouldn't be nil") - sethCfg := utils.MergeSethAndEvmNetworkConfigs(l, *evmNetwork, *readSethCfg) + sethCfg, err := utils.MergeSethAndEvmNetworkConfigs(*evmNetwork, *readSethCfg) + require.NoError(t, err, "Error merging seth and evm network configs") seth, err := seth.NewClientWithConfig(&sethCfg) require.NoError(t, err, "Error creating seth client") @@ -75,7 +76,8 @@ func TestOCRVolume(t *testing.T) { readSethCfg := config.GetSethConfig() require.NotNil(t, readSethCfg, "Seth config shouldn't be nil") - sethCfg := utils.MergeSethAndEvmNetworkConfigs(l, *evmNetwork, *readSethCfg) + sethCfg, err := utils.MergeSethAndEvmNetworkConfigs(*evmNetwork, *readSethCfg) + require.NoError(t, err, "Error merging seth and evm network configs") seth, err := seth.NewClientWithConfig(&sethCfg) require.NoError(t, err, "Error creating seth client") diff --git a/integration-tests/soak/ocr_test.go b/integration-tests/soak/ocr_test.go index e99ecdf072d..100bc6f7ef8 100644 --- 
a/integration-tests/soak/ocr_test.go +++ b/integration-tests/soak/ocr_test.go @@ -3,13 +3,16 @@ package soak import ( "testing" + "github.com/smartcontractkit/seth" "github.com/stretchr/testify/require" "github.com/smartcontractkit/chainlink-testing-framework/logging" + "github.com/smartcontractkit/chainlink-testing-framework/networks" actions_seth "github.com/smartcontractkit/chainlink/integration-tests/actions/seth" tc "github.com/smartcontractkit/chainlink/integration-tests/testconfig" "github.com/smartcontractkit/chainlink/integration-tests/testsetups" + "github.com/smartcontractkit/chainlink/integration-tests/utils" ) func TestOCRSoak(t *testing.T) { @@ -25,6 +28,19 @@ func TestOCRSoak(t *testing.T) { config, err := tc.GetConfig("Soak", tc.OCR) require.NoError(t, err, "Error getting config") + // validate Seth config before anything else + readSethCfg := config.GetSethConfig() + require.NotNil(t, readSethCfg, "Seth config shouldn't be nil") + + network := networks.MustGetSelectedNetworkConfig(config.GetNetworkConfig())[0] + sethCfg, err := utils.MergeSethAndEvmNetworkConfigs(network, *readSethCfg) + require.NoError(t, err, "Error merging seth and evm network configs") + err = utils.ValidateSethNetworkConfig(sethCfg.Network) + require.NoError(t, err, "Error validating seth network config") + + _, err = seth.NewClientWithConfig(&sethCfg) + require.NoError(t, err, "Error creating seth client") + ocrSoakTest, err := testsetups.NewOCRSoakTest(t, &config, false) require.NoError(t, err, "Error creating soak test") if !ocrSoakTest.Interrupted() { diff --git a/integration-tests/testconfig/default.toml b/integration-tests/testconfig/default.toml index d334eaa3c13..92f8bcd7f80 100644 --- a/integration-tests/testconfig/default.toml +++ b/integration-tests/testconfig/default.toml @@ -39,6 +39,10 @@ trace_to_json = false # with the value equal to (root_balance / ephemeral_addresses_number) - transfer_fee * ephemeral_addresses_number ephemeral_addresses_number = 0 +# If 
enabled we will panic when getting transaction options if current key/address has a pending transaction +# That's because the one we are about to send would get queued, possibly for a very long time +pending_nonce_protection_enabled = false + [Seth.nonce_manager] key_sync_rate_limit_per_sec = 10 key_sync_timeout = "2s" @@ -49,11 +53,17 @@ key_sync_retries = 10 name = "Geth" chain_id = "1337" transaction_timeout = "30s" -urls = ["ws://localhost:8546"] +eip_1559_dynamic_fees = false + +# gas limits transfer_gas_fee = 21_000 -gas_limit = 8_000_000 +# gas limit should be explicitly set only if you are connecting to a node that's incapable of estimating gas limit itself (should only happen for very old versions) +# gas_limit = 8_000_000 + +# manual settings, used when gas_estimation_enabled is false or when it fails # legacy transactions gas_price = 1_000_000_000 + # EIP-1559 transactions #eip_1559_dynamic_fees = true gas_fee_cap = 10_000_000_000 @@ -63,12 +73,32 @@ gas_tip_cap = 3_000_000_000 name = "Fuji" chain_id = "43113" transaction_timeout = "3m" +eip_1559_dynamic_fees = true + +# automated gas estimation for live networks +# if set to true we will dynamically estimate gas for every transaction (based on suggested values, priority and congestion rate for last X blocks) +# gas_estimation_enabled = true +# number of blocks to use for congestion rate estimation (it will determine buffer added on top of suggested values) +# gas_estimation_blocks = 100 +# transaction priority, which determines adjustment factor multiplier applied to suggested values (fast - 1.2x, standard - 1x, slow - 0.8x) +# gas_estimation_tx_priority = "standard" + +# URLs +# if set they will overwrite URLs from EVMNetwork that Seth uses, can be either WS(S) or HTTP(S) +# urls_secret = ["ws://your-ws-url:8546"] + +# gas_limits +# gas limit should be explicitly set only if you are connecting to a node that's incapable of estimating gas limit itself (should only happen for very old versions) +# 
gas_limit = 8_000_000 +# transfer_gas_fee is gas limit that will be used, when funding CL nodes and returning funds from there and when funding and returning funds from ephemeral keys +# we use hardcoded value in order to be estimate how much funds are available for sending or returning after tx costs have been paid transfer_gas_fee = 21_000 -gas_limit = 8_000_000 + +# manual settings, used when gas_estimation_enabled is false or when it fails # legacy transactions gas_price = 30_000_000_000 + # EIP-1559 transactions -eip_1559_dynamic_fees = true gas_fee_cap = 30_000_000_000 gas_tip_cap = 1_800_000_000 @@ -76,12 +106,32 @@ gas_tip_cap = 1_800_000_000 name = "Sepolia" chain_id = "11155111" transaction_timeout = "3m" -transfer_gas_fee = 40_000 -gas_limit = 30_000_000 +eip_1559_dynamic_fees = false + +# automated gas estimation for live networks +# if set to true we will dynamically estimate gas for every transaction (based on suggested values, priority and congestion rate for last X blocks) +# gas_estimation_enabled = true +# number of blocks to use for congestion rate estimation (it will determine buffer added on top of suggested values) +# gas_estimation_blocks = 100 +# transaction priority, which determines adjustment factor multiplier applied to suggested values (fast - 1.2x, standard - 1x, slow - 0.8x) +# gas_estimation_tx_priority = "standard" + +# URLs +# if set they will overwrite URLs from EVMNetwork that Seth uses, can be either WS(S) or HTTP(S) +# urls_secret = ["ws://your-ws-url:8546"] + +# gas_limits +# gas limit should be explicitly set only if you are connecting to a node that's incapable of estimating gas limit itself (should only happen for very old versions) +# gas_limit = 14_000_000 +# transfer_gas_fee is gas limit that will be used, when funding CL nodes and returning funds from there and when funding and returning funds from ephemeral keys +# we use hardcoded value in order to be estimate how much funds are available for sending or returning 
after tx costs have been paid +transfer_gas_fee = 21_000 + +# manual settings, used when gas_estimation_enabled is false or when it fails # legacy transactions gas_price = 50_000_000_000 + # EIP-1559 transactions -# eip_1559_dynamic_fees = true gas_fee_cap = 45_000_000_000 gas_tip_cap = 10_000_000_000 @@ -89,12 +139,32 @@ gas_tip_cap = 10_000_000_000 name = "Mumbai" chain_id = "80001" transaction_timeout = "3m" -transfer_gas_fee = 40_000 -gas_limit = 6_000_000 +eip_1559_dynamic_fees = true + +# automated gas estimation for live networks +# if set to true we will dynamically estimate gas for every transaction (based on suggested values, priority and congestion rate for last X blocks) +# gas_estimation_enabled = true +# number of blocks to use for congestion rate estimation (it will determine buffer added on top of suggested values) +# gas_estimation_blocks = 100 +# transaction priority, which determines adjustment factor multiplier applied to suggested values (fast - 1.2x, standard - 1x, slow - 0.8x) +# gas_estimation_tx_priority = "standard" + +# URLs +# if set they will overwrite URLs from EVMNetwork that Seth uses, can be either WS(S) or HTTP(S) +# urls_secret = ["ws://your-ws-url:8546"] + +# gas_limits +# gas limit should be explicitly set only if you are connecting to a node that's incapable of estimating gas limit itself (should only happen for very old versions) +# gas_limit = 6_000_000 +# transfer_gas_fee is gas limit that will be used, when funding CL nodes and returning funds from there and when funding and returning funds from ephemeral keys +# we use hardcoded value in order to be estimate how much funds are available for sending or returning after tx costs have been paid +transfer_gas_fee = 21_000 + +# manual settings, used when gas_estimation_enabled is false or when it fails # legacy transactions -#gas_price = 1_800_000_000 +gas_price = 1_800_000_000 + # EIP-1559 transactions -eip_1559_dynamic_fees = true gas_fee_cap = 3_800_000_000 gas_tip_cap = 
1_800_000_000 @@ -102,11 +172,31 @@ gas_tip_cap = 1_800_000_000 name = "zkEVM" chain_id = "1442" transaction_timeout = "3m" +eip_1559_dynamic_fees = false + +# automated gas estimation for live networks +# if set to true we will dynamically estimate gas for every transaction (based on suggested values, priority and congestion rate for last X blocks) +# gas_estimation_enabled = true +# number of blocks to use for congestion rate estimation (it will determine buffer added on top of suggested values) +# gas_estimation_blocks = 100 +# transaction priority, which determines adjustment factor multiplier applied to suggested values (fast - 1.2x, standard - 1x, slow - 0.8x) +# gas_estimation_tx_priority = "standard" + +# URLs +# if set they will overwrite URLs from EVMNetwork that Seth uses, can be either WS(S) or HTTP(S) +# urls_secret = ["ws://your-ws-url:8546"] + +# gas_limits +# gas limit should be explicitly set only if you are connecting to a node that's incapable of estimating gas limit itself (should only happen for very old versions) +# gas_limit = 9_000_000 +# transfer_gas_fee is gas limit that will be used, when funding CL nodes and returning funds from there and when funding and returning funds from ephemeral keys +# we use hardcoded value in order to be estimate how much funds are available for sending or returning after tx costs have been paid transfer_gas_fee = 21_000 -gas_limit = 3_000_000 + +# manual settings, used when gas_estimation_enabled is false or when it fails # legacy transactions gas_price = 50_000_000 + # EIP-1559 transactions -#eip_1559_dynamic_fees = true gas_fee_cap = 3_800_000_000 gas_tip_cap = 1_800_000_000 \ No newline at end of file diff --git a/integration-tests/testconfig/testconfig.go b/integration-tests/testconfig/testconfig.go index 097315c2e98..30a795e1881 100644 --- a/integration-tests/testconfig/testconfig.go +++ b/integration-tests/testconfig/testconfig.go @@ -407,6 +407,7 @@ func (c *TestConfig) readNetworkConfiguration() error 
{ } c.Network.UpperCaseNetworkNames() + c.Network.OverrideURLsAndKeysFromEVMNetwork() err := c.Network.Default() if err != nil { return errors.Wrapf(err, "error reading default network config") diff --git a/integration-tests/testconfig/testconfig_utils.go b/integration-tests/testconfig/testconfig_utils.go index d1803c22650..d4290689bc0 100644 --- a/integration-tests/testconfig/testconfig_utils.go +++ b/integration-tests/testconfig/testconfig_utils.go @@ -10,7 +10,7 @@ import ( // If legacy env vars are found it prints ready to use TOML configuration func MissingImageInfoAsError(errStr string) error { intro := ` -Old configuration approach detected. Please use TOML instead of env vars. +You might have used old configuration approach. If so, use TOML instead of env vars. Please refer to integration-tests/testconfig/README.md for more information. ` @@ -41,7 +41,7 @@ Or if you want to run your tests right now add following content to integration- // If legacy env var is found it prints ready to use TOML configuration. func NoSelectedNetworkInfoAsError(errStr string) error { intro := ` -Old configuration approach detected. Please use TOML instead of env vars. +You might have used old configuration approach. If so, use TOML instead of env vars. Please refer to integration-tests/testconfig/README.md for more information. 
` diff --git a/integration-tests/testsetups/ocr.go b/integration-tests/testsetups/ocr.go index d30797ac507..caf9da48e21 100644 --- a/integration-tests/testsetups/ocr.go +++ b/integration-tests/testsetups/ocr.go @@ -113,6 +113,7 @@ func (o *OCRSoakTest) DeployEnvironment(customChainlinkNetworkTOML string, ocrTe nsPre = fmt.Sprintf("%sforwarder-", nsPre) } nsPre = fmt.Sprintf("%s%s", nsPre, strings.ReplaceAll(strings.ToLower(network.Name), " ", "-")) + nsPre = strings.ReplaceAll(nsPre, "_", "-") baseEnvironmentConfig := &environment.Config{ TTL: time.Hour * 720, // 30 days, NamespacePrefix: nsPre, @@ -170,7 +171,8 @@ func (o *OCRSoakTest) Setup(ocrTestConfig tt.OcrTestConfig) { readSethCfg := ocrTestConfig.GetSethConfig() require.NotNil(o.t, readSethCfg, "Seth config shouldn't be nil") - sethCfg := utils.MergeSethAndEvmNetworkConfigs(o.log, network, *readSethCfg) + sethCfg, err := utils.MergeSethAndEvmNetworkConfigs(network, *readSethCfg) + require.NoError(o.t, err, "Error merging seth and evm network configs") err = utils.ValidateSethNetworkConfig(sethCfg.Network) require.NoError(o.t, err, "Error validating seth network config") diff --git a/integration-tests/utils/seth.go b/integration-tests/utils/seth.go index c2d4f743a6b..ef9b331a447 100644 --- a/integration-tests/utils/seth.go +++ b/integration-tests/utils/seth.go @@ -14,10 +14,10 @@ import ( // MergeSethAndEvmNetworkConfigs merges EVMNetwork to Seth config. If Seth config already has Network settings, // it will return unchanged Seth config that was passed to it. If the network is simulated, it will // use Geth-specific settings. Otherwise it will use the chain ID to find the correct network settings. -// If no match is found it will use default settings (currently based on Sepolia network settings). -func MergeSethAndEvmNetworkConfigs(l zerolog.Logger, evmNetwork blockchain.EVMNetwork, sethConfig seth.Config) seth.Config { +// If no match is found it will return error. 
+func MergeSethAndEvmNetworkConfigs(evmNetwork blockchain.EVMNetwork, sethConfig seth.Config) (seth.Config, error) { if sethConfig.Network != nil { - return sethConfig + return sethConfig, nil } var sethNetwork *seth.Network @@ -26,16 +26,25 @@ func MergeSethAndEvmNetworkConfigs(l zerolog.Logger, evmNetwork blockchain.EVMNe if evmNetwork.Simulated { if conf.Name == seth.GETH { conf.PrivateKeys = evmNetwork.PrivateKeys - conf.URLs = evmNetwork.URLs + if len(conf.URLs) == 0 { + conf.URLs = evmNetwork.URLs + } // important since Besu doesn't support EIP-1559, but other EVM clients do conf.EIP1559DynamicFees = evmNetwork.SupportsEIP1559 + // might be needed for cases, when node is incapable of estimating gas limit (e.g. Geth < v1.10.0) + if evmNetwork.DefaultGasLimit != 0 { + conf.GasLimit = evmNetwork.DefaultGasLimit + } + sethNetwork = conf break } } else if conf.ChainID == fmt.Sprint(evmNetwork.ChainID) { conf.PrivateKeys = evmNetwork.PrivateKeys - conf.URLs = evmNetwork.URLs + if len(conf.URLs) == 0 { + conf.URLs = evmNetwork.URLs + } sethNetwork = conf break @@ -43,27 +52,12 @@ func MergeSethAndEvmNetworkConfigs(l zerolog.Logger, evmNetwork blockchain.EVMNe } if sethNetwork == nil { - //TODO in the future we could run gas estimator here - l.Warn(). - Int64("chainID", evmNetwork.ChainID). - Msg("Could not find any Seth network settings for chain ID. 
Using default network settings") - sethNetwork = &seth.Network{} - sethNetwork.PrivateKeys = evmNetwork.PrivateKeys - sethNetwork.URLs = evmNetwork.URLs - sethNetwork.EIP1559DynamicFees = evmNetwork.SupportsEIP1559 - sethNetwork.ChainID = fmt.Sprint(evmNetwork.ChainID) - // Sepolia settings - sethNetwork.GasLimit = 14_000_000 - sethNetwork.GasPrice = 1_000_000_000 - sethNetwork.GasFeeCap = 25_000_000_000 - sethNetwork.GasTipCap = 5_000_000_000 - sethNetwork.TransferGasFee = 21_000 - sethNetwork.TxnTimeout = seth.MustMakeDuration(evmNetwork.Timeout.Duration) + return seth.Config{}, fmt.Errorf("No matching EVM network found for chain ID %d. If it's a new network please define it as [Network.EVMNetworks.NETWORK_NAME] in TOML", evmNetwork.ChainID) } sethConfig.Network = sethNetwork - return sethConfig + return sethConfig, nil } // MustReplaceSimulatedNetworkUrlWithK8 replaces the simulated network URL with the K8 URL and returns the network. @@ -108,9 +102,6 @@ func ValidateSethNetworkConfig(cfg *seth.Network) error { if cfg.TxnTimeout.Duration() == 0 { return fmt.Errorf("TxnTimeout needs to be above 0. It's the timeout for a transaction") } - if cfg.GasLimit == 0 { - return fmt.Errorf("GasLimit needs to be above 0. It's the gas limit for a transaction") - } if cfg.EIP1559DynamicFees { if cfg.GasFeeCap == 0 { return fmt.Errorf("GasFeeCap needs to be above 0. It's the maximum fee per gas for a transaction (including tip)") From 46a6ea21988771c9a4d46f9183ea28f3beb05e4f Mon Sep 17 00:00:00 2001 From: Cedric Date: Tue, 9 Apr 2024 11:15:06 +0100 Subject: [PATCH 04/10] [fix] Install jq when building plugins image (#12750) --- plugins/chainlink.Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/plugins/chainlink.Dockerfile b/plugins/chainlink.Dockerfile index a7e9706435c..9c7f71183f0 100644 --- a/plugins/chainlink.Dockerfile +++ b/plugins/chainlink.Dockerfile @@ -14,6 +14,8 @@ ARG COMMIT_SHA COPY . . 
+RUN apt-get update && apt-get install -y jq + # Build the golang binaries RUN make install-chainlink From bd9f16edc7f17f9eba539e0a2f311e466e1d69bd Mon Sep 17 00:00:00 2001 From: Rens Rooimans Date: Tue, 9 Apr 2024 15:24:20 +0200 Subject: [PATCH 05/10] Fix issues in package.json (#12748) * bump split test npm * fix babel traverse * fix json5 * Bump follow-redirects from 1.15.2 to 1.15.6 * Bump undici from 5.19.1 to 5.28.4 --- .github/actions/split-tests/package.json | 16 +- .github/actions/split-tests/pnpm-lock.yaml | 1181 +++++++++++++------- contracts/pnpm-lock.yaml | 35 +- 3 files changed, 818 insertions(+), 414 deletions(-) diff --git a/.github/actions/split-tests/package.json b/.github/actions/split-tests/package.json index 1624bda7b37..82555aa28bb 100644 --- a/.github/actions/split-tests/package.json +++ b/.github/actions/split-tests/package.json @@ -12,15 +12,15 @@ "author": "", "license": "MIT", "dependencies": { - "@actions/core": "^1.10.0", - "ts-node": "^10.9.1", - "zx": "^7.0.8" + "@actions/core": "^1.10.1", + "ts-node": "^10.9.2", + "zx": "^7.2.3" }, "devDependencies": { - "@types/jest": "^29.1.2", - "@types/node": "^18.8.2", - "jest": "^29.1.2", - "ts-jest": "^29.0.3", - "typescript": "^5.2.2" + "@types/jest": "^29.5.12", + "@types/node": "^18.19.31", + "jest": "^29.7.0", + "ts-jest": "^29.1.2", + "typescript": "^5.4.4" } } diff --git a/.github/actions/split-tests/pnpm-lock.yaml b/.github/actions/split-tests/pnpm-lock.yaml index 9b5deb258dc..201cf0e57ef 100644 --- a/.github/actions/split-tests/pnpm-lock.yaml +++ b/.github/actions/split-tests/pnpm-lock.yaml @@ -6,36 +6,36 @@ settings: dependencies: '@actions/core': - specifier: ^1.10.0 - version: 1.10.0 + specifier: ^1.10.1 + version: 1.10.1 ts-node: - specifier: ^10.9.1 - version: 10.9.1(@types/node@18.8.2)(typescript@5.2.2) + specifier: ^10.9.2 + version: 10.9.2(@types/node@18.19.31)(typescript@5.4.4) zx: - specifier: ^7.0.8 - version: 7.0.8 + specifier: ^7.2.3 + version: 7.2.3 devDependencies: 
'@types/jest': - specifier: ^29.1.2 - version: 29.1.2 + specifier: ^29.5.12 + version: 29.5.12 '@types/node': - specifier: ^18.8.2 - version: 18.8.2 + specifier: ^18.19.31 + version: 18.19.31 jest: - specifier: ^29.1.2 - version: 29.1.2(@types/node@18.8.2)(ts-node@10.9.1) + specifier: ^29.7.0 + version: 29.7.0(@types/node@18.19.31)(ts-node@10.9.2) ts-jest: - specifier: ^29.0.3 - version: 29.0.3(@babel/core@7.19.3)(jest@29.1.2)(typescript@5.2.2) + specifier: ^29.1.2 + version: 29.1.2(@babel/core@7.19.3)(jest@29.7.0)(typescript@5.4.4) typescript: - specifier: ^5.2.2 - version: 5.2.2 + specifier: ^5.4.4 + version: 5.4.4 packages: - /@actions/core@1.10.0: - resolution: {integrity: sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==} + /@actions/core@1.10.1: + resolution: {integrity: sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==} dependencies: '@actions/http-client': 2.0.1 uuid: 8.3.2 @@ -52,7 +52,7 @@ packages: engines: {node: '>=6.0.0'} dependencies: '@jridgewell/gen-mapping': 0.1.1 - '@jridgewell/trace-mapping': 0.3.15 + '@jridgewell/trace-mapping': 0.3.25 dev: true /@babel/code-frame@7.18.6: @@ -62,11 +62,24 @@ packages: '@babel/highlight': 7.18.6 dev: true + /@babel/code-frame@7.24.2: + resolution: {integrity: sha512-y5+tLQyV8pg3fsiln67BVLD1P13Eg4lh5RW9mF0zUuvLrv9uIQ4MCL+CRT+FTsBlBjcIan6PGsLcBN0m3ClUyQ==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/highlight': 7.24.2 + picocolors: 1.0.0 + dev: true + /@babel/compat-data@7.19.3: resolution: {integrity: sha512-prBHMK4JYYK+wDjJF1q99KK4JLL+egWS4nmNqdlMUgCExMZ+iZW0hGhyC3VEbsPjvaN0TBhW//VIFwBrk8sEiw==} engines: {node: '>=6.9.0'} dev: true + /@babel/compat-data@7.24.4: + resolution: {integrity: sha512-vg8Gih2MLK+kOkHJp4gBEIkyaIi00jgWot2D9QOmmfLC8jINSOzmCLta6Bvz/JSBCqnegV0L80jhxkol5GWNfQ==} + engines: {node: '>=6.9.0'} + dev: true + /@babel/core@7.19.3: resolution: {integrity: 
sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ==} engines: {node: '>=6.9.0'} @@ -79,17 +92,40 @@ packages: '@babel/helpers': 7.19.0 '@babel/parser': 7.19.3 '@babel/template': 7.18.10 - '@babel/traverse': 7.19.3 + '@babel/traverse': 7.24.1 '@babel/types': 7.19.3 convert-source-map: 1.8.0 debug: 4.3.4 gensync: 1.0.0-beta.2 - json5: 2.2.1 + json5: 2.2.3 semver: 6.3.0 transitivePeerDependencies: - supports-color dev: true + /@babel/core@7.24.4: + resolution: {integrity: sha512-MBVlMXP+kkl5394RBLSxxk/iLTeVGuXTV3cIDXavPpMMqnSnt6apKgan/U8O3USWZCWZT/TbgfEpKa4uMgN4Dg==} + engines: {node: '>=6.9.0'} + dependencies: + '@ampproject/remapping': 2.2.0 + '@babel/code-frame': 7.24.2 + '@babel/generator': 7.24.4 + '@babel/helper-compilation-targets': 7.23.6 + '@babel/helper-module-transforms': 7.23.3(@babel/core@7.24.4) + '@babel/helpers': 7.24.4 + '@babel/parser': 7.24.4 + '@babel/template': 7.24.0 + '@babel/traverse': 7.24.1 + '@babel/types': 7.24.0 + convert-source-map: 2.0.0 + debug: 4.3.4 + gensync: 1.0.0-beta.2 + json5: 2.2.3 + semver: 6.3.1 + transitivePeerDependencies: + - supports-color + dev: true + /@babel/generator@7.19.3: resolution: {integrity: sha512-fqVZnmp1ncvZU757UzDheKZpfPgatqY59XtW2/j/18H7u76akb8xqvjw82f+i2UKd/ksYsSick/BCLQUUtJ/qQ==} engines: {node: '>=6.9.0'} @@ -99,6 +135,16 @@ packages: jsesc: 2.5.2 dev: true + /@babel/generator@7.24.4: + resolution: {integrity: sha512-Xd6+v6SnjWVx/nus+y0l1sxMOTOMBkyL4+BIdbALyatQnAe/SRVjANeDPSCYaX+i1iJmuGSKf3Z+E+V/va1Hvw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.24.0 + '@jridgewell/gen-mapping': 0.3.5 + '@jridgewell/trace-mapping': 0.3.25 + jsesc: 2.5.2 + dev: true + /@babel/helper-compilation-targets@7.19.3(@babel/core@7.19.3): resolution: {integrity: sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg==} engines: {node: '>=6.9.0'} @@ -112,11 +158,27 @@ packages: semver: 6.3.0 dev: true + 
/@babel/helper-compilation-targets@7.23.6: + resolution: {integrity: sha512-9JB548GZoQVmzrFgp8o7KxdgkTGm6xs9DW0o/Pim72UDjzr5ObUQ6ZzYPqA+g9OTS2bBQoctLJrky0RDCAWRgQ==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/compat-data': 7.24.4 + '@babel/helper-validator-option': 7.23.5 + browserslist: 4.23.0 + lru-cache: 5.1.1 + semver: 6.3.1 + dev: true + /@babel/helper-environment-visitor@7.18.9: resolution: {integrity: sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==} engines: {node: '>=6.9.0'} dev: true + /@babel/helper-environment-visitor@7.22.20: + resolution: {integrity: sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==} + engines: {node: '>=6.9.0'} + dev: true + /@babel/helper-function-name@7.19.0: resolution: {integrity: sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==} engines: {node: '>=6.9.0'} @@ -125,6 +187,14 @@ packages: '@babel/types': 7.19.3 dev: true + /@babel/helper-function-name@7.23.0: + resolution: {integrity: sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/template': 7.24.0 + '@babel/types': 7.24.0 + dev: true + /@babel/helper-hoist-variables@7.18.6: resolution: {integrity: sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==} engines: {node: '>=6.9.0'} @@ -132,6 +202,13 @@ packages: '@babel/types': 7.19.3 dev: true + /@babel/helper-hoist-variables@7.22.5: + resolution: {integrity: sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.24.0 + dev: true + /@babel/helper-module-imports@7.18.6: resolution: {integrity: sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==} engines: {node: '>=6.9.0'} @@ -139,6 +216,13 @@ 
packages: '@babel/types': 7.19.3 dev: true + /@babel/helper-module-imports@7.24.3: + resolution: {integrity: sha512-viKb0F9f2s0BCS22QSF308z/+1YWKV/76mwt61NBzS5izMzDPwdq1pTrzf+Li3npBWX9KdQbkeCt1jSAM7lZqg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.24.0 + dev: true + /@babel/helper-module-transforms@7.19.0: resolution: {integrity: sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ==} engines: {node: '>=6.9.0'} @@ -149,12 +233,26 @@ packages: '@babel/helper-split-export-declaration': 7.18.6 '@babel/helper-validator-identifier': 7.19.1 '@babel/template': 7.18.10 - '@babel/traverse': 7.19.3 + '@babel/traverse': 7.24.1 '@babel/types': 7.19.3 transitivePeerDependencies: - supports-color dev: true + /@babel/helper-module-transforms@7.23.3(@babel/core@7.24.4): + resolution: {integrity: sha512-7bBs4ED9OmswdfDzpz4MpWgSrV7FXlc3zIagvLFjS5H+Mk7Snr21vQ6QwrsoCGMfNC4e4LQPdoULEt4ykz0SRQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + dependencies: + '@babel/core': 7.24.4 + '@babel/helper-environment-visitor': 7.22.20 + '@babel/helper-module-imports': 7.24.3 + '@babel/helper-simple-access': 7.22.5 + '@babel/helper-split-export-declaration': 7.22.6 + '@babel/helper-validator-identifier': 7.22.20 + dev: true + /@babel/helper-plugin-utils@7.19.0: resolution: {integrity: sha512-40Ryx7I8mT+0gaNxm8JGTZFUITNqdLAgdg0hXzeVZxVD6nFsdhQvip6v8dqkRHzsz1VFpFAaOCHNn0vKBL7Czw==} engines: {node: '>=6.9.0'} @@ -167,6 +265,13 @@ packages: '@babel/types': 7.19.3 dev: true + /@babel/helper-simple-access@7.22.5: + resolution: {integrity: sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.24.0 + dev: true + /@babel/helper-split-export-declaration@7.18.6: resolution: {integrity: sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==} engines: {node: '>=6.9.0'} @@ 
-174,32 +279,65 @@ packages: '@babel/types': 7.19.3 dev: true + /@babel/helper-split-export-declaration@7.22.6: + resolution: {integrity: sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/types': 7.24.0 + dev: true + /@babel/helper-string-parser@7.18.10: resolution: {integrity: sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw==} engines: {node: '>=6.9.0'} dev: true + /@babel/helper-string-parser@7.24.1: + resolution: {integrity: sha512-2ofRCjnnA9y+wk8b9IAREroeUP02KHp431N2mhKniy2yKIDKpbrHv9eXwm8cBeWQYcJmzv5qKCu65P47eCF7CQ==} + engines: {node: '>=6.9.0'} + dev: true + /@babel/helper-validator-identifier@7.19.1: resolution: {integrity: sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==} engines: {node: '>=6.9.0'} dev: true + /@babel/helper-validator-identifier@7.22.20: + resolution: {integrity: sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==} + engines: {node: '>=6.9.0'} + dev: true + /@babel/helper-validator-option@7.18.6: resolution: {integrity: sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==} engines: {node: '>=6.9.0'} dev: true + /@babel/helper-validator-option@7.23.5: + resolution: {integrity: sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw==} + engines: {node: '>=6.9.0'} + dev: true + /@babel/helpers@7.19.0: resolution: {integrity: sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg==} engines: {node: '>=6.9.0'} dependencies: '@babel/template': 7.18.10 - '@babel/traverse': 7.19.3 + '@babel/traverse': 7.24.1 '@babel/types': 7.19.3 transitivePeerDependencies: - supports-color dev: true + /@babel/helpers@7.24.4: + resolution: {integrity: 
sha512-FewdlZbSiwaVGlgT1DPANDuCHaDMiOo+D/IDYRFYjHOuv66xMSJ7fQwwODwRNAPkADIO/z1EoF/l2BCWlWABDw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/template': 7.24.0 + '@babel/traverse': 7.24.1 + '@babel/types': 7.24.0 + transitivePeerDependencies: + - supports-color + dev: true + /@babel/highlight@7.18.6: resolution: {integrity: sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==} engines: {node: '>=6.9.0'} @@ -209,6 +347,16 @@ packages: js-tokens: 4.0.0 dev: true + /@babel/highlight@7.24.2: + resolution: {integrity: sha512-Yac1ao4flkTxTteCDZLEvdxg2fZfz1v8M4QpaGypq/WPDqg3ijHYbDfs+LG5hvzSoqaSZ9/Z9lKSP3CjZjv+pA==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-validator-identifier': 7.22.20 + chalk: 2.4.2 + js-tokens: 4.0.0 + picocolors: 1.0.0 + dev: true + /@babel/parser@7.19.3: resolution: {integrity: sha512-pJ9xOlNWHiy9+FuFP09DEAFbAn4JskgRsVcc169w2xRBC3FRGuQEwjeIMMND9L2zc0iEhO/tGv4Zq+km+hxNpQ==} engines: {node: '>=6.0.0'} @@ -217,6 +365,14 @@ packages: '@babel/types': 7.19.3 dev: true + /@babel/parser@7.24.4: + resolution: {integrity: sha512-zTvEBcghmeBma9QIGunWevvBAp4/Qu9Bdq+2k0Ot4fVMD6v3dsC9WOcRSKk7tRRyBM/53yKMJko9xOatGQAwSg==} + engines: {node: '>=6.0.0'} + hasBin: true + dependencies: + '@babel/types': 7.19.3 + dev: true + /@babel/plugin-syntax-async-generators@7.8.4(@babel/core@7.19.3): resolution: {integrity: sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==} peerDependencies: @@ -355,18 +511,27 @@ packages: '@babel/types': 7.19.3 dev: true - /@babel/traverse@7.19.3: - resolution: {integrity: sha512-qh5yf6149zhq2sgIXmwjnsvmnNQC2iw70UFjp4olxucKrWd/dvlUsBI88VSLUsnMNF7/vnOiA+nk1+yLoCqROQ==} + /@babel/template@7.24.0: + resolution: {integrity: sha512-Bkf2q8lMB0AFpX0NFEqSbx1OkTHf0f+0j82mkw+ZpzBnkk7e9Ql0891vlfgi+kHwOk8tQjiQHpqh4LaSa0fKEA==} engines: {node: '>=6.9.0'} dependencies: - '@babel/code-frame': 7.18.6 - '@babel/generator': 7.19.3 - 
'@babel/helper-environment-visitor': 7.18.9 - '@babel/helper-function-name': 7.19.0 - '@babel/helper-hoist-variables': 7.18.6 - '@babel/helper-split-export-declaration': 7.18.6 - '@babel/parser': 7.19.3 - '@babel/types': 7.19.3 + '@babel/code-frame': 7.24.2 + '@babel/parser': 7.24.4 + '@babel/types': 7.24.0 + dev: true + + /@babel/traverse@7.24.1: + resolution: {integrity: sha512-xuU6o9m68KeqZbQuDt2TcKSxUw/mrsvavlEqQ1leZ/B+C9tk6E4sRWy97WaXgvq5E+nU3cXMxv3WKOCanVMCmQ==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.24.2 + '@babel/generator': 7.24.4 + '@babel/helper-environment-visitor': 7.22.20 + '@babel/helper-function-name': 7.23.0 + '@babel/helper-hoist-variables': 7.22.5 + '@babel/helper-split-export-declaration': 7.22.6 + '@babel/parser': 7.24.4 + '@babel/types': 7.24.0 debug: 4.3.4 globals: 11.12.0 transitivePeerDependencies: @@ -382,6 +547,15 @@ packages: to-fast-properties: 2.0.0 dev: true + /@babel/types@7.24.0: + resolution: {integrity: sha512-+j7a5c253RfKh8iABBhywc8NSfP5LURe7Uh4qpsh6jc+aLJguvmIUBdjSdEMQv2bENrCR5MfRdjGo7vzS/ob7w==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-string-parser': 7.24.1 + '@babel/helper-validator-identifier': 7.22.20 + to-fast-properties: 2.0.0 + dev: true + /@bcoe/v8-coverage@0.2.3: resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} dev: true @@ -408,20 +582,20 @@ packages: engines: {node: '>=8'} dev: true - /@jest/console@29.1.2: - resolution: {integrity: sha512-ujEBCcYs82BTmRxqfHMQggSlkUZP63AE5YEaTPj7eFyJOzukkTorstOUC7L6nE3w5SYadGVAnTsQ/ZjTGL0qYQ==} + /@jest/console@29.7.0: + resolution: {integrity: sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@jest/types': 29.1.2 - '@types/node': 18.8.2 + '@jest/types': 29.6.3 + '@types/node': 18.19.31 chalk: 4.1.2 - jest-message-util: 29.1.2 - jest-util: 29.1.2 + 
jest-message-util: 29.7.0 + jest-util: 29.7.0 slash: 3.0.0 dev: true - /@jest/core@29.1.2(ts-node@10.9.1): - resolution: {integrity: sha512-sCO2Va1gikvQU2ynDN8V4+6wB7iVrD2CvT0zaRst4rglf56yLly0NQ9nuRRAWFeimRf+tCdFsb1Vk1N9LrrMPA==} + /@jest/core@29.7.0(ts-node@10.9.2): + resolution: {integrity: sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} peerDependencies: node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 @@ -429,47 +603,48 @@ packages: node-notifier: optional: true dependencies: - '@jest/console': 29.1.2 - '@jest/reporters': 29.1.2 - '@jest/test-result': 29.1.2 - '@jest/transform': 29.1.2 - '@jest/types': 29.1.2 - '@types/node': 18.8.2 + '@jest/console': 29.7.0 + '@jest/reporters': 29.7.0 + '@jest/test-result': 29.7.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 18.19.31 ansi-escapes: 4.3.2 chalk: 4.1.2 ci-info: 3.4.0 exit: 0.1.2 graceful-fs: 4.2.10 - jest-changed-files: 29.0.0 - jest-config: 29.1.2(@types/node@18.8.2)(ts-node@10.9.1) - jest-haste-map: 29.1.2 - jest-message-util: 29.1.2 - jest-regex-util: 29.0.0 - jest-resolve: 29.1.2 - jest-resolve-dependencies: 29.1.2 - jest-runner: 29.1.2 - jest-runtime: 29.1.2 - jest-snapshot: 29.1.2 - jest-util: 29.1.2 - jest-validate: 29.1.2 - jest-watcher: 29.1.2 + jest-changed-files: 29.7.0 + jest-config: 29.7.0(@types/node@18.19.31)(ts-node@10.9.2) + jest-haste-map: 29.7.0 + jest-message-util: 29.7.0 + jest-regex-util: 29.6.3 + jest-resolve: 29.7.0 + jest-resolve-dependencies: 29.7.0 + jest-runner: 29.7.0 + jest-runtime: 29.7.0 + jest-snapshot: 29.7.0 + jest-util: 29.7.0 + jest-validate: 29.7.0 + jest-watcher: 29.7.0 micromatch: 4.0.5 - pretty-format: 29.1.2 + pretty-format: 29.7.0 slash: 3.0.0 strip-ansi: 6.0.1 transitivePeerDependencies: + - babel-plugin-macros - supports-color - ts-node dev: true - /@jest/environment@29.1.2: - resolution: {integrity: 
sha512-rG7xZ2UeOfvOVzoLIJ0ZmvPl4tBEQ2n73CZJSlzUjPw4or1oSWC0s0Rk0ZX+pIBJ04aVr6hLWFn1DFtrnf8MhQ==} + /@jest/environment@29.7.0: + resolution: {integrity: sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@jest/fake-timers': 29.1.2 - '@jest/types': 29.1.2 - '@types/node': 18.8.2 - jest-mock: 29.1.2 + '@jest/fake-timers': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 18.19.31 + jest-mock: 29.7.0 dev: true /@jest/expect-utils@29.1.2: @@ -479,42 +654,49 @@ packages: jest-get-type: 29.0.0 dev: true - /@jest/expect@29.1.2: - resolution: {integrity: sha512-FXw/UmaZsyfRyvZw3M6POgSNqwmuOXJuzdNiMWW9LCYo0GRoRDhg+R5iq5higmRTHQY7hx32+j7WHwinRmoILQ==} + /@jest/expect-utils@29.7.0: + resolution: {integrity: sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - expect: 29.1.2 - jest-snapshot: 29.1.2 + jest-get-type: 29.6.3 + dev: true + + /@jest/expect@29.7.0: + resolution: {integrity: sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dependencies: + expect: 29.7.0 + jest-snapshot: 29.7.0 transitivePeerDependencies: - supports-color dev: true - /@jest/fake-timers@29.1.2: - resolution: {integrity: sha512-GppaEqS+QQYegedxVMpCe2xCXxxeYwQ7RsNx55zc8f+1q1qevkZGKequfTASI7ejmg9WwI+SJCrHe9X11bLL9Q==} + /@jest/fake-timers@29.7.0: + resolution: {integrity: sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@jest/types': 29.1.2 - '@sinonjs/fake-timers': 9.1.2 - '@types/node': 18.8.2 - jest-message-util: 29.1.2 - jest-mock: 29.1.2 - jest-util: 29.1.2 + '@jest/types': 29.6.3 + '@sinonjs/fake-timers': 10.3.0 + '@types/node': 18.19.31 + jest-message-util: 29.7.0 + 
jest-mock: 29.7.0 + jest-util: 29.7.0 dev: true - /@jest/globals@29.1.2: - resolution: {integrity: sha512-uMgfERpJYoQmykAd0ffyMq8wignN4SvLUG6orJQRe9WAlTRc9cdpCaE/29qurXixYJVZWUqIBXhSk8v5xN1V9g==} + /@jest/globals@29.7.0: + resolution: {integrity: sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@jest/environment': 29.1.2 - '@jest/expect': 29.1.2 - '@jest/types': 29.1.2 - jest-mock: 29.1.2 + '@jest/environment': 29.7.0 + '@jest/expect': 29.7.0 + '@jest/types': 29.6.3 + jest-mock: 29.7.0 transitivePeerDependencies: - supports-color dev: true - /@jest/reporters@29.1.2: - resolution: {integrity: sha512-X4fiwwyxy9mnfpxL0g9DD0KcTmEIqP0jUdnc2cfa9riHy+I6Gwwp5vOZiwyg0vZxfSDxrOlK9S4+340W4d+DAA==} + /@jest/reporters@29.7.0: + resolution: {integrity: sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} peerDependencies: node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 @@ -523,29 +705,28 @@ packages: optional: true dependencies: '@bcoe/v8-coverage': 0.2.3 - '@jest/console': 29.1.2 - '@jest/test-result': 29.1.2 - '@jest/transform': 29.1.2 - '@jest/types': 29.1.2 - '@jridgewell/trace-mapping': 0.3.15 - '@types/node': 18.8.2 + '@jest/console': 29.7.0 + '@jest/test-result': 29.7.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + '@jridgewell/trace-mapping': 0.3.25 + '@types/node': 18.19.31 chalk: 4.1.2 collect-v8-coverage: 1.0.1 exit: 0.1.2 glob: 7.2.3 graceful-fs: 4.2.10 istanbul-lib-coverage: 3.2.0 - istanbul-lib-instrument: 5.2.1 + istanbul-lib-instrument: 6.0.2 istanbul-lib-report: 3.0.0 istanbul-lib-source-maps: 4.0.1 istanbul-reports: 3.1.5 - jest-message-util: 29.1.2 - jest-util: 29.1.2 - jest-worker: 29.1.2 + jest-message-util: 29.7.0 + jest-util: 29.7.0 + jest-worker: 29.7.0 slash: 3.0.0 string-length: 4.0.2 strip-ansi: 6.0.1 - terminal-link: 2.1.1 v8-to-istanbul: 
9.0.1 transitivePeerDependencies: - supports-color @@ -558,50 +739,57 @@ packages: '@sinclair/typebox': 0.24.44 dev: true - /@jest/source-map@29.0.0: - resolution: {integrity: sha512-nOr+0EM8GiHf34mq2GcJyz/gYFyLQ2INDhAylrZJ9mMWoW21mLBfZa0BUVPPMxVYrLjeiRe2Z7kWXOGnS0TFhQ==} + /@jest/schemas@29.6.3: + resolution: {integrity: sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@jridgewell/trace-mapping': 0.3.15 + '@sinclair/typebox': 0.27.8 + dev: true + + /@jest/source-map@29.6.3: + resolution: {integrity: sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dependencies: + '@jridgewell/trace-mapping': 0.3.25 callsites: 3.1.0 graceful-fs: 4.2.10 dev: true - /@jest/test-result@29.1.2: - resolution: {integrity: sha512-jjYYjjumCJjH9hHCoMhA8PCl1OxNeGgAoZ7yuGYILRJX9NjgzTN0pCT5qAoYR4jfOP8htIByvAlz9vfNSSBoVg==} + /@jest/test-result@29.7.0: + resolution: {integrity: sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@jest/console': 29.1.2 - '@jest/types': 29.1.2 + '@jest/console': 29.7.0 + '@jest/types': 29.6.3 '@types/istanbul-lib-coverage': 2.0.4 collect-v8-coverage: 1.0.1 dev: true - /@jest/test-sequencer@29.1.2: - resolution: {integrity: sha512-fU6dsUqqm8sA+cd85BmeF7Gu9DsXVWFdGn9taxM6xN1cKdcP/ivSgXh5QucFRFz1oZxKv3/9DYYbq0ULly3P/Q==} + /@jest/test-sequencer@29.7.0: + resolution: {integrity: sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@jest/test-result': 29.1.2 + '@jest/test-result': 29.7.0 graceful-fs: 4.2.10 - jest-haste-map: 29.1.2 + jest-haste-map: 29.7.0 slash: 3.0.0 dev: true - /@jest/transform@29.1.2: - resolution: {integrity: 
sha512-2uaUuVHTitmkx1tHF+eBjb4p7UuzBG7SXIaA/hNIkaMP6K+gXYGxP38ZcrofzqN0HeZ7A90oqsOa97WU7WZkSw==} + /@jest/transform@29.7.0: + resolution: {integrity: sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: '@babel/core': 7.19.3 - '@jest/types': 29.1.2 - '@jridgewell/trace-mapping': 0.3.15 + '@jest/types': 29.6.3 + '@jridgewell/trace-mapping': 0.3.25 babel-plugin-istanbul: 6.1.1 chalk: 4.1.2 - convert-source-map: 1.8.0 + convert-source-map: 2.0.0 fast-json-stable-stringify: 2.1.0 graceful-fs: 4.2.10 - jest-haste-map: 29.1.2 - jest-regex-util: 29.0.0 - jest-util: 29.1.2 + jest-haste-map: 29.7.0 + jest-regex-util: 29.6.3 + jest-util: 29.7.0 micromatch: 4.0.5 pirates: 4.0.5 slash: 3.0.0 @@ -617,7 +805,19 @@ packages: '@jest/schemas': 29.0.0 '@types/istanbul-lib-coverage': 2.0.4 '@types/istanbul-reports': 3.0.1 - '@types/node': 18.8.2 + '@types/node': 18.19.31 + '@types/yargs': 17.0.13 + chalk: 4.1.2 + dev: true + + /@jest/types@29.6.3: + resolution: {integrity: sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dependencies: + '@jest/schemas': 29.6.3 + '@types/istanbul-lib-coverage': 2.0.4 + '@types/istanbul-reports': 3.0.1 + '@types/node': 18.19.31 '@types/yargs': 17.0.13 chalk: 4.1.2 dev: true @@ -639,6 +839,15 @@ packages: '@jridgewell/trace-mapping': 0.3.15 dev: true + /@jridgewell/gen-mapping@0.3.5: + resolution: {integrity: sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==} + engines: {node: '>=6.0.0'} + dependencies: + '@jridgewell/set-array': 1.2.1 + '@jridgewell/sourcemap-codec': 1.4.14 + '@jridgewell/trace-mapping': 0.3.25 + dev: true + /@jridgewell/resolve-uri@3.1.0: resolution: {integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==} engines: {node: '>=6.0.0'} 
@@ -648,6 +857,11 @@ packages: engines: {node: '>=6.0.0'} dev: true + /@jridgewell/set-array@1.2.1: + resolution: {integrity: sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==} + engines: {node: '>=6.0.0'} + dev: true + /@jridgewell/sourcemap-codec@1.4.14: resolution: {integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==} @@ -658,6 +872,13 @@ packages: '@jridgewell/sourcemap-codec': 1.4.14 dev: true + /@jridgewell/trace-mapping@0.3.25: + resolution: {integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==} + dependencies: + '@jridgewell/resolve-uri': 3.1.0 + '@jridgewell/sourcemap-codec': 1.4.14 + dev: true + /@jridgewell/trace-mapping@0.3.9: resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} dependencies: @@ -689,16 +910,20 @@ packages: resolution: {integrity: sha512-ka0W0KN5i6LfrSocduwliMMpqVgohtPFidKdMEOUjoOFCHcOOYkKsPRxfs5f15oPNHTm6ERAm0GV/+/LTKeiWg==} dev: true - /@sinonjs/commons@1.8.3: - resolution: {integrity: sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ==} + /@sinclair/typebox@0.27.8: + resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==} + dev: true + + /@sinonjs/commons@3.0.1: + resolution: {integrity: sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==} dependencies: type-detect: 4.0.8 dev: true - /@sinonjs/fake-timers@9.1.2: - resolution: {integrity: sha512-BPS4ynJW/o92PUR4wgriz2Ud5gpST5vz6GQfMixEDK0Z8ZCUv2M7SkBLykH56T++Xs+8ln9zTGbOvNGIe02/jw==} + /@sinonjs/fake-timers@10.3.0: + resolution: {integrity: sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==} dependencies: - '@sinonjs/commons': 1.8.3 + '@sinonjs/commons': 3.0.1 dev: true 
/@tsconfig/node10@1.0.9: @@ -742,16 +967,17 @@ packages: '@babel/types': 7.19.3 dev: true - /@types/fs-extra@9.0.13: - resolution: {integrity: sha512-nEnwB++1u5lVDM2UI4c1+5R+FYaKfaAzS4OococimjVm3nQw3TuzH5UNsocrcTBbhnerblyHj4A49qXbIiZdpA==} + /@types/fs-extra@11.0.4: + resolution: {integrity: sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==} dependencies: - '@types/node': 18.8.2 + '@types/jsonfile': 6.1.4 + '@types/node': 18.19.31 dev: false /@types/graceful-fs@4.1.5: resolution: {integrity: sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw==} dependencies: - '@types/node': 18.8.2 + '@types/node': 18.19.31 dev: true /@types/istanbul-lib-coverage@2.0.4: @@ -770,23 +996,27 @@ packages: '@types/istanbul-lib-report': 3.0.0 dev: true - /@types/jest@29.1.2: - resolution: {integrity: sha512-y+nlX0h87U0R+wsGn6EBuoRWYyv3KFtwRNP3QWp9+k2tJ2/bqcGS3UxD7jgT+tiwJWWq3UsyV4Y+T6rsMT4XMg==} + /@types/jest@29.5.12: + resolution: {integrity: sha512-eDC8bTvT/QhYdxJAulQikueigY5AsdBRH2yDKW3yveW7svY3+DzN84/2NUgkw10RTiJbWqZrTtoGVdYlvFJdLw==} dependencies: expect: 29.1.2 pretty-format: 29.1.2 dev: true + /@types/jsonfile@6.1.4: + resolution: {integrity: sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==} + dependencies: + '@types/node': 18.19.31 + dev: false + /@types/minimist@1.2.2: resolution: {integrity: sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==} dev: false - /@types/node@18.8.2: - resolution: {integrity: sha512-cRMwIgdDN43GO4xMWAfJAecYn8wV4JbsOGHNfNUIDiuYkUYAR5ec4Rj7IO2SAhFPEfpPtLtUTbbny/TCT7aDwA==} - - /@types/prettier@2.7.1: - resolution: {integrity: sha512-ri0UmynRRvZiiUJdiz38MmIblKK+oH30MztdBVR95dv/Ubw6neWSb8u1XpRb72L4qsZOhz+L+z9JD40SJmfWow==} - dev: true + /@types/node@18.19.31: + resolution: {integrity: 
sha512-ArgCD39YpyyrtFKIqMDvjz79jto5fcI/SVUs2HwB+f0dAzq68yqOdyaSivLiLugSziTpNXLQrVb7RZFmdZzbhA==} + dependencies: + undici-types: 5.26.5 /@types/ps-tree@1.1.2: resolution: {integrity: sha512-ZREFYlpUmPQJ0esjxoG1fMvB2HNaD3z+mjqdSosZvd3RalncI9NEur73P8ZJz4YQdL64CmV1w0RuqoRUlhQRBw==} @@ -796,8 +1026,8 @@ packages: resolution: {integrity: sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==} dev: true - /@types/which@2.0.1: - resolution: {integrity: sha512-Jjakcv8Roqtio6w1gr0D7y6twbhx6gGgFGF5BLwajPpnOIOxFkakFhCq+LmyyeAz7BX6ULrjBOxdKaCDy+4+dQ==} + /@types/which@3.0.3: + resolution: {integrity: sha512-2C1+XoY0huExTbs8MQv1DuS5FS86+SEjdM9F/+GS61gg5Hqbtj8ZiDSx8MfWcyei907fIPbfPGCOrNUTnVHY1g==} dev: false /@types/yargs-parser@21.0.0: @@ -867,17 +1097,17 @@ packages: sprintf-js: 1.0.3 dev: true - /babel-jest@29.1.2(@babel/core@7.19.3): - resolution: {integrity: sha512-IuG+F3HTHryJb7gacC7SQ59A9kO56BctUsT67uJHp1mMCHUOMXpDwOHWGifWqdWVknN2WNkCVQELPjXx0aLJ9Q==} + /babel-jest@29.7.0(@babel/core@7.19.3): + resolution: {integrity: sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} peerDependencies: '@babel/core': ^7.8.0 dependencies: '@babel/core': 7.19.3 - '@jest/transform': 29.1.2 + '@jest/transform': 29.7.0 '@types/babel__core': 7.1.19 babel-plugin-istanbul: 6.1.1 - babel-preset-jest: 29.0.2(@babel/core@7.19.3) + babel-preset-jest: 29.6.3(@babel/core@7.19.3) chalk: 4.1.2 graceful-fs: 4.2.10 slash: 3.0.0 @@ -898,8 +1128,8 @@ packages: - supports-color dev: true - /babel-plugin-jest-hoist@29.0.2: - resolution: {integrity: sha512-eBr2ynAEFjcebVvu8Ktx580BD1QKCrBG1XwEUTXJe285p9HA/4hOhfWCFRQhTKSyBV0VzjhG7H91Eifz9s29hg==} + /babel-plugin-jest-hoist@29.6.3: + resolution: {integrity: sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: 
'@babel/template': 7.18.10 @@ -928,14 +1158,14 @@ packages: '@babel/plugin-syntax-top-level-await': 7.14.5(@babel/core@7.19.3) dev: true - /babel-preset-jest@29.0.2(@babel/core@7.19.3): - resolution: {integrity: sha512-BeVXp7rH5TK96ofyEnHjznjLMQ2nAeDJ+QzxKnHAAMs0RgrQsCywjAN8m4mOm5Di0pxU//3AoEeJJrerMH5UeA==} + /babel-preset-jest@29.6.3(@babel/core@7.19.3): + resolution: {integrity: sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} peerDependencies: '@babel/core': ^7.0.0 dependencies: '@babel/core': 7.19.3 - babel-plugin-jest-hoist: 29.0.2 + babel-plugin-jest-hoist: 29.6.3 babel-preset-current-node-syntax: 1.0.1(@babel/core@7.19.3) dev: true @@ -967,6 +1197,17 @@ packages: update-browserslist-db: 1.0.10(browserslist@4.21.4) dev: true + /browserslist@4.23.0: + resolution: {integrity: sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + dependencies: + caniuse-lite: 1.0.30001607 + electron-to-chromium: 1.4.730 + node-releases: 2.0.14 + update-browserslist-db: 1.0.13(browserslist@4.23.0) + dev: true + /bs-logger@0.2.6: resolution: {integrity: sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==} engines: {node: '>= 6'} @@ -1003,6 +1244,10 @@ packages: resolution: {integrity: sha512-oIs7+JL3K9JRQ3jPZjlH6qyYDp+nBTCais7hjh0s+fuBwufc7uZ7hPYMXrDOJhV360KGMTcczMRObk0/iMqZRg==} dev: true + /caniuse-lite@1.0.30001607: + resolution: {integrity: sha512-WcvhVRjXLKFB/kmOFVwELtMxyhq3iM/MvmXcyCe2PNf166c39mptscOc/45TTS96n2gpNV2z7+NakArTWZCQ3w==} + dev: true + /chalk@2.4.2: resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} engines: {node: '>=4'} @@ -1020,8 +1265,8 @@ packages: supports-color: 7.2.0 dev: true - /chalk@5.0.1: - resolution: {integrity: 
sha512-Fo07WOYGqMfCWHOzSXOt2CxDbC6skS/jO9ynEcmpANMoPrD+W1r1K6Vx7iNm+AQmETU1Xr2t+n8nzkV9t6xh3w==} + /chalk@5.3.0: + resolution: {integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==} engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} dev: false @@ -1087,6 +1332,29 @@ packages: safe-buffer: 5.1.2 dev: true + /convert-source-map@2.0.0: + resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} + dev: true + + /create-jest@29.7.0(@types/node@18.19.31)(ts-node@10.9.2): + resolution: {integrity: sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + hasBin: true + dependencies: + '@jest/types': 29.6.3 + chalk: 4.1.2 + exit: 0.1.2 + graceful-fs: 4.2.10 + jest-config: 29.7.0(@types/node@18.19.31)(ts-node@10.9.2) + jest-util: 29.7.0 + prompts: 2.4.2 + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - supports-color + - ts-node + dev: true + /create-require@1.1.1: resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} @@ -1116,8 +1384,13 @@ packages: ms: 2.1.2 dev: true - /dedent@0.7.0: - resolution: {integrity: sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==} + /dedent@1.5.1: + resolution: {integrity: sha512-+LxW+KLWxu3HW3M2w2ympwtqPrqYRzU8fqi6Fhd18fBALe15blJPI/I4+UHveMVG6lJqB4JNd4UG0S5cnVHwIg==} + peerDependencies: + babel-plugin-macros: ^3.1.0 + peerDependenciesMeta: + babel-plugin-macros: + optional: true dev: true /deepmerge@4.2.2: @@ -1135,6 +1408,11 @@ packages: engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dev: true + /diff-sequences@29.6.3: + resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dev: 
true + /diff@4.0.2: resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} engines: {node: '>=0.3.1'} @@ -1154,8 +1432,12 @@ packages: resolution: {integrity: sha512-aJeQQ+Hl9Jyyzv4chBqYJwmVRY46N5i2BEX5Cuyk/5gFCUZ5F3i7Hnba6snZftWla7Gglwc5pIgcd+E7cW+rPg==} dev: true - /emittery@0.10.2: - resolution: {integrity: sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw==} + /electron-to-chromium@1.4.730: + resolution: {integrity: sha512-oJRPo82XEqtQAobHpJIR3zW5YO3sSRRkPz2an4yxi1UvqhsGm54vR/wzTFV74a3soDOJ8CKW7ajOOX5ESzddwg==} + dev: true + + /emittery@0.13.1: + resolution: {integrity: sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==} engines: {node: '>=12'} dev: true @@ -1191,7 +1473,7 @@ packages: dev: true /event-stream@3.3.4: - resolution: {integrity: sha512-QHpkERcGsR0T7Qm3HNJSyXKEEj8AHNxkY3PK8TS2KJvQ7NiSHe3DDpwVKKtoYprL/AreyzFBeIkBIWChAqn60g==} + resolution: {integrity: sha1-SrTJoPWlTbkzi0w02Gv86PSzVXE=} dependencies: duplexer: 0.1.2 from: 0.1.7 @@ -1233,8 +1515,19 @@ packages: jest-util: 29.1.2 dev: true - /fast-glob@3.2.12: - resolution: {integrity: sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==} + /expect@29.7.0: + resolution: {integrity: sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dependencies: + '@jest/expect-utils': 29.7.0 + jest-get-type: 29.6.3 + jest-matcher-utils: 29.7.0 + jest-message-util: 29.7.0 + jest-util: 29.7.0 + dev: true + + /fast-glob@3.3.2: + resolution: {integrity: sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==} engines: {node: '>=8.6.0'} dependencies: '@nodelib/fs.stat': 2.0.5 @@ -1293,9 +1586,9 @@ packages: resolution: {integrity: 
sha512-twe20eF1OxVxp/ML/kq2p1uc6KvFK/+vs8WjEbeKmV2He22MKm7YF2ANIt+EOqhJ5L3K/SuuPhk0hWQDjOM23g==} dev: false - /fs-extra@10.1.0: - resolution: {integrity: sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==} - engines: {node: '>=12'} + /fs-extra@11.2.0: + resolution: {integrity: sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==} + engines: {node: '>=14.14'} dependencies: graceful-fs: 4.2.10 jsonfile: 6.1.0 @@ -1318,6 +1611,11 @@ packages: resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} dev: true + /fx@34.0.0: + resolution: {integrity: sha512-/fZih3/WLsrtlaj2mahjWxAmyuikmcl3D5kKPqLtFmEilLsy9wp0+/vEmfvYXXhwJc+ajtCFDCf+yttXmPMHSQ==} + hasBin: true + dev: false + /gensync@1.0.0-beta.2: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} engines: {node: '>=6.9.0'} @@ -1361,13 +1659,13 @@ packages: engines: {node: '>=4'} dev: true - /globby@13.1.2: - resolution: {integrity: sha512-LKSDZXToac40u8Q1PQtZihbNdTYSNMuWe+K5l+oa6KgDzSvVrHXlJy40hUP522RjAIoNLJYBJi7ow+rbFpIhHQ==} + /globby@13.2.2: + resolution: {integrity: sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} dependencies: dir-glob: 3.0.1 - fast-glob: 3.2.12 - ignore: 5.2.0 + fast-glob: 3.3.2 + ignore: 5.3.1 merge2: 1.4.1 slash: 4.0.0 dev: false @@ -1401,8 +1699,8 @@ packages: engines: {node: '>=10.17.0'} dev: true - /ignore@5.2.0: - resolution: {integrity: sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==} + /ignore@5.3.1: + resolution: {integrity: sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==} engines: {node: '>= 4'} dev: false @@ -1493,6 +1791,19 @@ packages: - supports-color dev: true + 
/istanbul-lib-instrument@6.0.2: + resolution: {integrity: sha512-1WUsZ9R1lA0HtBSohTkm39WTPlNKSJ5iFk7UwqXkBLoHQT+hfqPsfsTDVuZdKGaBwn7din9bS7SsnoAr943hvw==} + engines: {node: '>=10'} + dependencies: + '@babel/core': 7.24.4 + '@babel/parser': 7.24.4 + '@istanbuljs/schema': 0.1.3 + istanbul-lib-coverage: 3.2.0 + semver: 7.6.0 + transitivePeerDependencies: + - supports-color + dev: true + /istanbul-lib-report@3.0.0: resolution: {integrity: sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==} engines: {node: '>=8'} @@ -1521,43 +1832,46 @@ packages: istanbul-lib-report: 3.0.0 dev: true - /jest-changed-files@29.0.0: - resolution: {integrity: sha512-28/iDMDrUpGoCitTURuDqUzWQoWmOmOKOFST1mi2lwh62X4BFf6khgH3uSuo1e49X/UDjuApAj3w0wLOex4VPQ==} + /jest-changed-files@29.7.0: + resolution: {integrity: sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: execa: 5.1.1 + jest-util: 29.7.0 p-limit: 3.1.0 dev: true - /jest-circus@29.1.2: - resolution: {integrity: sha512-ajQOdxY6mT9GtnfJRZBRYS7toNIJayiiyjDyoZcnvPRUPwJ58JX0ci0PKAKUo2C1RyzlHw0jabjLGKksO42JGA==} + /jest-circus@29.7.0: + resolution: {integrity: sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@jest/environment': 29.1.2 - '@jest/expect': 29.1.2 - '@jest/test-result': 29.1.2 - '@jest/types': 29.1.2 - '@types/node': 18.8.2 + '@jest/environment': 29.7.0 + '@jest/expect': 29.7.0 + '@jest/test-result': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 18.19.31 chalk: 4.1.2 co: 4.6.0 - dedent: 0.7.0 + dedent: 1.5.1 is-generator-fn: 2.1.0 - jest-each: 29.1.2 - jest-matcher-utils: 29.1.2 - jest-message-util: 29.1.2 - jest-runtime: 29.1.2 - jest-snapshot: 29.1.2 - jest-util: 29.1.2 + jest-each: 29.7.0 + jest-matcher-utils: 29.7.0 + jest-message-util: 29.7.0 + jest-runtime: 
29.7.0 + jest-snapshot: 29.7.0 + jest-util: 29.7.0 p-limit: 3.1.0 - pretty-format: 29.1.2 + pretty-format: 29.7.0 + pure-rand: 6.1.0 slash: 3.0.0 stack-utils: 2.0.5 transitivePeerDependencies: + - babel-plugin-macros - supports-color dev: true - /jest-cli@29.1.2(@types/node@18.8.2)(ts-node@10.9.1): - resolution: {integrity: sha512-vsvBfQ7oS2o4MJdAH+4u9z76Vw5Q8WBQF5MchDbkylNknZdrPTX1Ix7YRJyTlOWqRaS7ue/cEAn+E4V1MWyMzw==} + /jest-cli@29.7.0(@types/node@18.19.31)(ts-node@10.9.2): + resolution: {integrity: sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} hasBin: true peerDependencies: @@ -1566,26 +1880,26 @@ packages: node-notifier: optional: true dependencies: - '@jest/core': 29.1.2(ts-node@10.9.1) - '@jest/test-result': 29.1.2 - '@jest/types': 29.1.2 + '@jest/core': 29.7.0(ts-node@10.9.2) + '@jest/test-result': 29.7.0 + '@jest/types': 29.6.3 chalk: 4.1.2 + create-jest: 29.7.0(@types/node@18.19.31)(ts-node@10.9.2) exit: 0.1.2 - graceful-fs: 4.2.10 import-local: 3.1.0 - jest-config: 29.1.2(@types/node@18.8.2)(ts-node@10.9.1) - jest-util: 29.1.2 - jest-validate: 29.1.2 - prompts: 2.4.2 + jest-config: 29.7.0(@types/node@18.19.31)(ts-node@10.9.2) + jest-util: 29.7.0 + jest-validate: 29.7.0 yargs: 17.6.0 transitivePeerDependencies: - '@types/node' + - babel-plugin-macros - supports-color - ts-node dev: true - /jest-config@29.1.2(@types/node@18.8.2)(ts-node@10.9.1): - resolution: {integrity: sha512-EC3Zi86HJUOz+2YWQcJYQXlf0zuBhJoeyxLM6vb6qJsVmpP7KcCP1JnyF0iaqTaXdBP8Rlwsvs7hnKWQWWLwwA==} + /jest-config@29.7.0(@types/node@18.19.31)(ts-node@10.9.2): + resolution: {integrity: sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} peerDependencies: '@types/node': '*' @@ -1597,30 +1911,31 @@ packages: optional: true dependencies: '@babel/core': 7.19.3 - '@jest/test-sequencer': 29.1.2 - 
'@jest/types': 29.1.2 - '@types/node': 18.8.2 - babel-jest: 29.1.2(@babel/core@7.19.3) + '@jest/test-sequencer': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 18.19.31 + babel-jest: 29.7.0(@babel/core@7.19.3) chalk: 4.1.2 ci-info: 3.4.0 deepmerge: 4.2.2 glob: 7.2.3 graceful-fs: 4.2.10 - jest-circus: 29.1.2 - jest-environment-node: 29.1.2 - jest-get-type: 29.0.0 - jest-regex-util: 29.0.0 - jest-resolve: 29.1.2 - jest-runner: 29.1.2 - jest-util: 29.1.2 - jest-validate: 29.1.2 + jest-circus: 29.7.0 + jest-environment-node: 29.7.0 + jest-get-type: 29.6.3 + jest-regex-util: 29.6.3 + jest-resolve: 29.7.0 + jest-runner: 29.7.0 + jest-util: 29.7.0 + jest-validate: 29.7.0 micromatch: 4.0.5 parse-json: 5.2.0 - pretty-format: 29.1.2 + pretty-format: 29.7.0 slash: 3.0.0 strip-json-comments: 3.1.1 - ts-node: 10.9.1(@types/node@18.8.2)(typescript@5.2.2) + ts-node: 10.9.2(@types/node@18.19.31)(typescript@5.4.4) transitivePeerDependencies: + - babel-plugin-macros - supports-color dev: true @@ -1634,34 +1949,44 @@ packages: pretty-format: 29.1.2 dev: true - /jest-docblock@29.0.0: - resolution: {integrity: sha512-s5Kpra/kLzbqu9dEjov30kj1n4tfu3e7Pl8v+f8jOkeWNqM6Ds8jRaJfZow3ducoQUrf2Z4rs2N5S3zXnb83gw==} + /jest-diff@29.7.0: + resolution: {integrity: sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dependencies: + chalk: 4.1.2 + diff-sequences: 29.6.3 + jest-get-type: 29.6.3 + pretty-format: 29.7.0 + dev: true + + /jest-docblock@29.7.0: + resolution: {integrity: sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: detect-newline: 3.1.0 dev: true - /jest-each@29.1.2: - resolution: {integrity: sha512-AmTQp9b2etNeEwMyr4jc0Ql/LIX/dhbgP21gHAizya2X6rUspHn2gysMXaj6iwWuOJ2sYRgP8c1P4cXswgvS1A==} + /jest-each@29.7.0: + resolution: {integrity: 
sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@jest/types': 29.1.2 + '@jest/types': 29.6.3 chalk: 4.1.2 - jest-get-type: 29.0.0 - jest-util: 29.1.2 - pretty-format: 29.1.2 + jest-get-type: 29.6.3 + jest-util: 29.7.0 + pretty-format: 29.7.0 dev: true - /jest-environment-node@29.1.2: - resolution: {integrity: sha512-C59yVbdpY8682u6k/lh8SUMDJPbOyCHOTgLVVi1USWFxtNV+J8fyIwzkg+RJIVI30EKhKiAGNxYaFr3z6eyNhQ==} + /jest-environment-node@29.7.0: + resolution: {integrity: sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@jest/environment': 29.1.2 - '@jest/fake-timers': 29.1.2 - '@jest/types': 29.1.2 - '@types/node': 18.8.2 - jest-mock: 29.1.2 - jest-util: 29.1.2 + '@jest/environment': 29.7.0 + '@jest/fake-timers': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 18.19.31 + jest-mock: 29.7.0 + jest-util: 29.7.0 dev: true /jest-get-type@29.0.0: @@ -1669,31 +1994,36 @@ packages: engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dev: true - /jest-haste-map@29.1.2: - resolution: {integrity: sha512-xSjbY8/BF11Jh3hGSPfYTa/qBFrm3TPM7WU8pU93m2gqzORVLkHFWvuZmFsTEBPRKndfewXhMOuzJNHyJIZGsw==} + /jest-get-type@29.6.3: + resolution: {integrity: sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dev: true + + /jest-haste-map@29.7.0: + resolution: {integrity: sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@jest/types': 29.1.2 + '@jest/types': 29.6.3 '@types/graceful-fs': 4.1.5 - '@types/node': 18.8.2 + '@types/node': 18.19.31 anymatch: 3.1.2 fb-watchman: 2.0.2 graceful-fs: 4.2.10 - jest-regex-util: 29.0.0 - jest-util: 29.1.2 - jest-worker: 29.1.2 + 
jest-regex-util: 29.6.3 + jest-util: 29.7.0 + jest-worker: 29.7.0 micromatch: 4.0.5 walker: 1.0.8 optionalDependencies: fsevents: 2.3.2 dev: true - /jest-leak-detector@29.1.2: - resolution: {integrity: sha512-TG5gAZJpgmZtjb6oWxBLf2N6CfQ73iwCe6cofu/Uqv9iiAm6g502CAnGtxQaTfpHECBdVEMRBhomSXeLnoKjiQ==} + /jest-leak-detector@29.7.0: + resolution: {integrity: sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - jest-get-type: 29.0.0 - pretty-format: 29.1.2 + jest-get-type: 29.6.3 + pretty-format: 29.7.0 dev: true /jest-matcher-utils@29.1.2: @@ -1706,6 +2036,16 @@ packages: pretty-format: 29.1.2 dev: true + /jest-matcher-utils@29.7.0: + resolution: {integrity: sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dependencies: + chalk: 4.1.2 + jest-diff: 29.7.0 + jest-get-type: 29.6.3 + pretty-format: 29.7.0 + dev: true + /jest-message-util@29.1.2: resolution: {integrity: sha512-9oJ2Os+Qh6IlxLpmvshVbGUiSkZVc2FK+uGOm6tghafnB2RyjKAxMZhtxThRMxfX1J1SOMhTn9oK3/MutRWQJQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -1721,16 +2061,31 @@ packages: stack-utils: 2.0.5 dev: true - /jest-mock@29.1.2: - resolution: {integrity: sha512-PFDAdjjWbjPUtQPkQufvniXIS3N9Tv7tbibePEjIIprzjgo0qQlyUiVMrT4vL8FaSJo1QXifQUOuPH3HQC/aMA==} + /jest-message-util@29.7.0: + resolution: {integrity: sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@jest/types': 29.1.2 - '@types/node': 18.8.2 - jest-util: 29.1.2 + '@babel/code-frame': 7.18.6 + '@jest/types': 29.6.3 + '@types/stack-utils': 2.0.1 + chalk: 4.1.2 + graceful-fs: 4.2.10 + micromatch: 4.0.5 + pretty-format: 29.7.0 + slash: 3.0.0 + stack-utils: 2.0.5 + dev: true + + /jest-mock@29.7.0: + resolution: {integrity: 
sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dependencies: + '@jest/types': 29.6.3 + '@types/node': 18.19.31 + jest-util: 29.7.0 dev: true - /jest-pnp-resolver@1.2.2(jest-resolve@29.1.2): + /jest-pnp-resolver@1.2.2(jest-resolve@29.7.0): resolution: {integrity: sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w==} engines: {node: '>=6'} peerDependencies: @@ -1739,126 +2094,122 @@ packages: jest-resolve: optional: true dependencies: - jest-resolve: 29.1.2 + jest-resolve: 29.7.0 dev: true - /jest-regex-util@29.0.0: - resolution: {integrity: sha512-BV7VW7Sy0fInHWN93MMPtlClweYv2qrSCwfeFWmpribGZtQPWNvRSq9XOVgOEjU1iBGRKXUZil0o2AH7Iy9Lug==} + /jest-regex-util@29.6.3: + resolution: {integrity: sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dev: true - /jest-resolve-dependencies@29.1.2: - resolution: {integrity: sha512-44yYi+yHqNmH3OoWZvPgmeeiwKxhKV/0CfrzaKLSkZG9gT973PX8i+m8j6pDrTYhhHoiKfF3YUFg/6AeuHw4HQ==} + /jest-resolve-dependencies@29.7.0: + resolution: {integrity: sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - jest-regex-util: 29.0.0 - jest-snapshot: 29.1.2 + jest-regex-util: 29.6.3 + jest-snapshot: 29.7.0 transitivePeerDependencies: - supports-color dev: true - /jest-resolve@29.1.2: - resolution: {integrity: sha512-7fcOr+k7UYSVRJYhSmJHIid3AnDBcLQX3VmT9OSbPWsWz1MfT7bcoerMhADKGvKCoMpOHUQaDHtQoNp/P9JMGg==} + /jest-resolve@29.7.0: + resolution: {integrity: sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: chalk: 4.1.2 graceful-fs: 4.2.10 - jest-haste-map: 29.1.2 - jest-pnp-resolver: 
1.2.2(jest-resolve@29.1.2) - jest-util: 29.1.2 - jest-validate: 29.1.2 + jest-haste-map: 29.7.0 + jest-pnp-resolver: 1.2.2(jest-resolve@29.7.0) + jest-util: 29.7.0 + jest-validate: 29.7.0 resolve: 1.22.1 - resolve.exports: 1.1.0 + resolve.exports: 2.0.2 slash: 3.0.0 dev: true - /jest-runner@29.1.2: - resolution: {integrity: sha512-yy3LEWw8KuBCmg7sCGDIqKwJlULBuNIQa2eFSVgVASWdXbMYZ9H/X0tnXt70XFoGf92W2sOQDOIFAA6f2BG04Q==} + /jest-runner@29.7.0: + resolution: {integrity: sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@jest/console': 29.1.2 - '@jest/environment': 29.1.2 - '@jest/test-result': 29.1.2 - '@jest/transform': 29.1.2 - '@jest/types': 29.1.2 - '@types/node': 18.8.2 + '@jest/console': 29.7.0 + '@jest/environment': 29.7.0 + '@jest/test-result': 29.7.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 18.19.31 chalk: 4.1.2 - emittery: 0.10.2 + emittery: 0.13.1 graceful-fs: 4.2.10 - jest-docblock: 29.0.0 - jest-environment-node: 29.1.2 - jest-haste-map: 29.1.2 - jest-leak-detector: 29.1.2 - jest-message-util: 29.1.2 - jest-resolve: 29.1.2 - jest-runtime: 29.1.2 - jest-util: 29.1.2 - jest-watcher: 29.1.2 - jest-worker: 29.1.2 + jest-docblock: 29.7.0 + jest-environment-node: 29.7.0 + jest-haste-map: 29.7.0 + jest-leak-detector: 29.7.0 + jest-message-util: 29.7.0 + jest-resolve: 29.7.0 + jest-runtime: 29.7.0 + jest-util: 29.7.0 + jest-watcher: 29.7.0 + jest-worker: 29.7.0 p-limit: 3.1.0 source-map-support: 0.5.13 transitivePeerDependencies: - supports-color dev: true - /jest-runtime@29.1.2: - resolution: {integrity: sha512-jr8VJLIf+cYc+8hbrpt412n5jX3tiXmpPSYTGnwcvNemY+EOuLNiYnHJ3Kp25rkaAcTWOEI4ZdOIQcwYcXIAZw==} + /jest-runtime@29.7.0: + resolution: {integrity: sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - 
'@jest/environment': 29.1.2 - '@jest/fake-timers': 29.1.2 - '@jest/globals': 29.1.2 - '@jest/source-map': 29.0.0 - '@jest/test-result': 29.1.2 - '@jest/transform': 29.1.2 - '@jest/types': 29.1.2 - '@types/node': 18.8.2 + '@jest/environment': 29.7.0 + '@jest/fake-timers': 29.7.0 + '@jest/globals': 29.7.0 + '@jest/source-map': 29.6.3 + '@jest/test-result': 29.7.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 18.19.31 chalk: 4.1.2 cjs-module-lexer: 1.2.2 collect-v8-coverage: 1.0.1 glob: 7.2.3 graceful-fs: 4.2.10 - jest-haste-map: 29.1.2 - jest-message-util: 29.1.2 - jest-mock: 29.1.2 - jest-regex-util: 29.0.0 - jest-resolve: 29.1.2 - jest-snapshot: 29.1.2 - jest-util: 29.1.2 + jest-haste-map: 29.7.0 + jest-message-util: 29.7.0 + jest-mock: 29.7.0 + jest-regex-util: 29.6.3 + jest-resolve: 29.7.0 + jest-snapshot: 29.7.0 + jest-util: 29.7.0 slash: 3.0.0 strip-bom: 4.0.0 transitivePeerDependencies: - supports-color dev: true - /jest-snapshot@29.1.2: - resolution: {integrity: sha512-rYFomGpVMdBlfwTYxkUp3sjD6usptvZcONFYNqVlaz4EpHPnDvlWjvmOQ9OCSNKqYZqLM2aS3wq01tWujLg7gg==} + /jest-snapshot@29.7.0: + resolution: {integrity: sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: '@babel/core': 7.19.3 '@babel/generator': 7.19.3 '@babel/plugin-syntax-jsx': 7.18.6(@babel/core@7.19.3) '@babel/plugin-syntax-typescript': 7.18.6(@babel/core@7.19.3) - '@babel/traverse': 7.19.3 '@babel/types': 7.19.3 - '@jest/expect-utils': 29.1.2 - '@jest/transform': 29.1.2 - '@jest/types': 29.1.2 - '@types/babel__traverse': 7.18.2 - '@types/prettier': 2.7.1 + '@jest/expect-utils': 29.7.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 babel-preset-current-node-syntax: 1.0.1(@babel/core@7.19.3) chalk: 4.1.2 - expect: 29.1.2 + expect: 29.7.0 graceful-fs: 4.2.10 - jest-diff: 29.1.2 - jest-get-type: 29.0.0 - jest-haste-map: 29.1.2 - jest-matcher-utils: 29.1.2 - 
jest-message-util: 29.1.2 - jest-util: 29.1.2 + jest-diff: 29.7.0 + jest-get-type: 29.6.3 + jest-matcher-utils: 29.7.0 + jest-message-util: 29.7.0 + jest-util: 29.7.0 natural-compare: 1.4.0 - pretty-format: 29.1.2 - semver: 7.3.8 + pretty-format: 29.7.0 + semver: 7.6.0 transitivePeerDependencies: - supports-color dev: true @@ -1868,51 +2219,63 @@ packages: engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: '@jest/types': 29.1.2 - '@types/node': 18.8.2 + '@types/node': 18.19.31 chalk: 4.1.2 ci-info: 3.4.0 graceful-fs: 4.2.10 picomatch: 2.3.1 dev: true - /jest-validate@29.1.2: - resolution: {integrity: sha512-k71pOslNlV8fVyI+mEySy2pq9KdXdgZtm7NHrBX8LghJayc3wWZH0Yr0mtYNGaCU4F1OLPXRkwZR0dBm/ClshA==} + /jest-util@29.7.0: + resolution: {integrity: sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@jest/types': 29.1.2 + '@jest/types': 29.6.3 + '@types/node': 18.19.31 + chalk: 4.1.2 + ci-info: 3.4.0 + graceful-fs: 4.2.10 + picomatch: 2.3.1 + dev: true + + /jest-validate@29.7.0: + resolution: {integrity: sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dependencies: + '@jest/types': 29.6.3 camelcase: 6.3.0 chalk: 4.1.2 - jest-get-type: 29.0.0 + jest-get-type: 29.6.3 leven: 3.1.0 - pretty-format: 29.1.2 + pretty-format: 29.7.0 dev: true - /jest-watcher@29.1.2: - resolution: {integrity: sha512-6JUIUKVdAvcxC6bM8/dMgqY2N4lbT+jZVsxh0hCJRbwkIEnbr/aPjMQ28fNDI5lB51Klh00MWZZeVf27KBUj5w==} + /jest-watcher@29.7.0: + resolution: {integrity: sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@jest/test-result': 29.1.2 - '@jest/types': 29.1.2 - '@types/node': 18.8.2 + '@jest/test-result': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 18.19.31 
ansi-escapes: 4.3.2 chalk: 4.1.2 - emittery: 0.10.2 - jest-util: 29.1.2 + emittery: 0.13.1 + jest-util: 29.7.0 string-length: 4.0.2 dev: true - /jest-worker@29.1.2: - resolution: {integrity: sha512-AdTZJxKjTSPHbXT/AIOjQVmoFx0LHFcVabWu0sxI7PAy7rFf8c0upyvgBKgguVXdM4vY74JdwkyD4hSmpTW8jA==} + /jest-worker@29.7.0: + resolution: {integrity: sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} dependencies: - '@types/node': 18.8.2 - jest-util: 29.1.2 + '@types/node': 18.19.31 + jest-util: 29.7.0 merge-stream: 2.0.0 supports-color: 8.1.1 dev: true - /jest@29.1.2(@types/node@18.8.2)(ts-node@10.9.1): - resolution: {integrity: sha512-5wEIPpCezgORnqf+rCaYD1SK+mNN7NsstWzIsuvsnrhR/hSxXWd82oI7DkrbJ+XTD28/eG8SmxdGvukrGGK6Tw==} + /jest@29.7.0(@types/node@18.19.31)(ts-node@10.9.2): + resolution: {integrity: sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} hasBin: true peerDependencies: @@ -1921,12 +2284,13 @@ packages: node-notifier: optional: true dependencies: - '@jest/core': 29.1.2(ts-node@10.9.1) - '@jest/types': 29.1.2 + '@jest/core': 29.7.0(ts-node@10.9.2) + '@jest/types': 29.6.3 import-local: 3.1.0 - jest-cli: 29.1.2(@types/node@18.8.2)(ts-node@10.9.1) + jest-cli: 29.7.0(@types/node@18.19.31)(ts-node@10.9.2) transitivePeerDependencies: - '@types/node' + - babel-plugin-macros - supports-color - ts-node dev: true @@ -1953,8 +2317,8 @@ packages: resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} dev: true - /json5@2.2.1: - resolution: {integrity: sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==} + /json5@2.2.3: + resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} engines: {node: '>=6'} hasBin: true dev: 
true @@ -1992,6 +2356,12 @@ packages: resolution: {integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==} dev: true + /lru-cache@5.1.1: + resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} + dependencies: + yallist: 3.1.1 + dev: true + /lru-cache@6.0.0: resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==} engines: {node: '>=10'} @@ -2046,8 +2416,8 @@ packages: brace-expansion: 1.1.11 dev: true - /minimist@1.2.6: - resolution: {integrity: sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==} + /minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} dev: false /ms@2.1.2: @@ -2063,8 +2433,8 @@ packages: engines: {node: '>=10.5.0'} dev: false - /node-fetch@3.2.8: - resolution: {integrity: sha512-KtpD1YhGszhntMpBDyp5lyagk8KIMopC1LEb7cQUAh7zcosaX5uK8HnbNb2i3NTQK3sIawCItS0uFC3QzcLHdg==} + /node-fetch@3.3.1: + resolution: {integrity: sha512-cRVc/kyto/7E5shrWca1Wsea4y6tL9iYJE5FBCius3JQfb/4P4I295PfhgbJQBLTx6lATE4z+wK0rPM4VS2uow==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} dependencies: data-uri-to-buffer: 4.0.0 @@ -2076,6 +2446,10 @@ packages: resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} dev: true + /node-releases@2.0.14: + resolution: {integrity: sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==} + dev: true + /node-releases@2.0.6: resolution: {integrity: sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==} dev: true @@ -2166,7 +2540,7 @@ packages: dev: false /pause-stream@0.0.11: - resolution: {integrity: 
sha512-e3FBlXLmN/D1S+zHzanP4E/4Z60oFAa3O051qt1pxa7DEJWKAyil6upYVXCWadEnuoqa4Pkc9oUx9zsxYeRv8A==} + resolution: {integrity: sha1-/lo0sMvOErWqaitAPuLnO2AvFEU=} dependencies: through: 2.3.8 dev: false @@ -2200,6 +2574,15 @@ packages: react-is: 18.2.0 dev: true + /pretty-format@29.7.0: + resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dependencies: + '@jest/schemas': 29.6.3 + ansi-styles: 5.2.0 + react-is: 18.2.0 + dev: true + /prompts@2.4.2: resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} engines: {node: '>= 6'} @@ -2216,6 +2599,10 @@ packages: event-stream: 3.3.4 dev: false + /pure-rand@6.1.0: + resolution: {integrity: sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==} + dev: true + /queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} dev: false @@ -2241,8 +2628,8 @@ packages: engines: {node: '>=8'} dev: true - /resolve.exports@1.1.0: - resolution: {integrity: sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ==} + /resolve.exports@2.0.2: + resolution: {integrity: sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg==} engines: {node: '>=10'} dev: true @@ -2275,8 +2662,13 @@ packages: hasBin: true dev: true - /semver@7.3.8: - resolution: {integrity: sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==} + /semver@6.3.1: + resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} + hasBin: true + dev: true + + /semver@7.6.0: + resolution: {integrity: sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==} 
engines: {node: '>=10'} hasBin: true dependencies: @@ -2408,27 +2800,11 @@ packages: has-flag: 4.0.0 dev: true - /supports-hyperlinks@2.3.0: - resolution: {integrity: sha512-RpsAZlpWcDwOPQA22aCH4J0t7L8JmAvsCxfOSEwm7cQs3LshN36QaTkwd70DnBOXDWGssw2eUoc8CaRWT0XunA==} - engines: {node: '>=8'} - dependencies: - has-flag: 4.0.0 - supports-color: 7.2.0 - dev: true - /supports-preserve-symlinks-flag@1.0.0: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} dev: true - /terminal-link@2.1.1: - resolution: {integrity: sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ==} - engines: {node: '>=8'} - dependencies: - ansi-escapes: 4.3.2 - supports-hyperlinks: 2.3.0 - dev: true - /test-exclude@6.0.0: resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} engines: {node: '>=8'} @@ -2457,9 +2833,9 @@ packages: dependencies: is-number: 7.0.0 - /ts-jest@29.0.3(@babel/core@7.19.3)(jest@29.1.2)(typescript@5.2.2): - resolution: {integrity: sha512-Ibygvmuyq1qp/z3yTh9QTwVVAbFdDy/+4BtIQR2sp6baF2SJU/8CKK/hhnGIDY2L90Az2jIqTwZPnN2p+BweiQ==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + /ts-jest@29.1.2(@babel/core@7.19.3)(jest@29.7.0)(typescript@5.4.4): + resolution: {integrity: sha512-br6GJoH/WUX4pu7FbZXuWGKGNDuU7b8Uj77g/Sp7puZV6EXzuByl6JrECvm0MzVzSTkSHWTihsXt+5XYER5b+g==} + engines: {node: ^16.10.0 || ^18.0.0 || >=20.0.0} hasBin: true peerDependencies: '@babel/core': '>=7.0.0-beta.0 <8' @@ -2467,7 +2843,7 @@ packages: babel-jest: ^29.0.0 esbuild: '*' jest: ^29.0.0 - typescript: '>=4.3' + typescript: '>=4.3 <6' peerDependenciesMeta: '@babel/core': optional: true @@ -2481,18 +2857,18 @@ packages: '@babel/core': 7.19.3 bs-logger: 0.2.6 fast-json-stable-stringify: 2.1.0 - jest: 29.1.2(@types/node@18.8.2)(ts-node@10.9.1) + jest: 29.7.0(@types/node@18.19.31)(ts-node@10.9.2) jest-util: 
29.1.2 - json5: 2.2.1 + json5: 2.2.3 lodash.memoize: 4.1.2 make-error: 1.3.6 - semver: 7.3.8 - typescript: 5.2.2 + semver: 7.6.0 + typescript: 5.4.4 yargs-parser: 21.1.1 dev: true - /ts-node@10.9.1(@types/node@18.8.2)(typescript@5.2.2): - resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==} + /ts-node@10.9.2(@types/node@18.19.31)(typescript@5.4.4): + resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==} hasBin: true peerDependencies: '@swc/core': '>=1.2.50' @@ -2510,14 +2886,14 @@ packages: '@tsconfig/node12': 1.0.11 '@tsconfig/node14': 1.0.3 '@tsconfig/node16': 1.0.3 - '@types/node': 18.8.2 + '@types/node': 18.19.31 acorn: 8.8.0 acorn-walk: 8.2.0 arg: 4.1.3 create-require: 1.1.1 diff: 4.0.2 make-error: 1.3.6 - typescript: 5.2.2 + typescript: 5.4.4 v8-compile-cache-lib: 3.0.1 yn: 3.1.1 @@ -2536,11 +2912,14 @@ packages: engines: {node: '>=10'} dev: true - /typescript@5.2.2: - resolution: {integrity: sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==} + /typescript@5.4.4: + resolution: {integrity: sha512-dGE2Vv8cpVvw28v8HCPqyb08EzbBURxDpuhJvTrusShUfGnhHBafDsLdS1EhhxyL6BJQE+2cT3dDPAv+MQ6oLw==} engines: {node: '>=14.17'} hasBin: true + /undici-types@5.26.5: + resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + /universalify@2.0.0: resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==} engines: {node: '>= 10.0.0'} @@ -2557,6 +2936,17 @@ packages: picocolors: 1.0.0 dev: true + /update-browserslist-db@1.0.13(browserslist@4.23.0): + resolution: {integrity: sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + dependencies: + browserslist: 4.23.0 + 
escalade: 3.1.1 + picocolors: 1.0.0 + dev: true + /uuid@8.3.2: resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} hasBin: true @@ -2569,7 +2959,7 @@ packages: resolution: {integrity: sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w==} engines: {node: '>=10.12.0'} dependencies: - '@jridgewell/trace-mapping': 0.3.15 + '@jridgewell/trace-mapping': 0.3.25 '@types/istanbul-lib-coverage': 2.0.4 convert-source-map: 1.8.0 dev: true @@ -2585,12 +2975,26 @@ packages: engines: {node: '>= 8'} dev: false + /webpod@0.0.2: + resolution: {integrity: sha512-cSwwQIeg8v4i3p4ajHhwgR7N6VyxAf+KYSSsY6Pd3aETE+xEU4vbitz7qQkB0I321xnhDdgtxuiSfk5r/FVtjg==} + hasBin: true + dev: false + /which@2.0.2: resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} engines: {node: '>= 8'} hasBin: true dependencies: isexe: 2.0.0 + dev: true + + /which@3.0.1: + resolution: {integrity: sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + hasBin: true + dependencies: + isexe: 2.0.0 + dev: false /wrap-ansi@7.0.0: resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} @@ -2618,13 +3022,18 @@ packages: engines: {node: '>=10'} dev: true + /yallist@3.1.1: + resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} + dev: true + /yallist@4.0.0: resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} dev: true - /yaml@2.1.2: - resolution: {integrity: sha512-VSdf2/K3FqAetooKQv45Hcu6sA00aDgWZeGcG6V9IYJnVLTnb6988Tie79K5nx2vK7cEpf+yW8Oy+7iPAbdiHA==} + /yaml@2.4.1: + resolution: {integrity: 
sha512-pIXzoImaqmfOrL7teGUBt/T7ZDnyeGBWyXQBvOVhLkWLN37GXv8NMLK406UY6dS51JfcQHsmcW5cJ441bHg6Lg==} engines: {node: '>= 14'} + hasBin: true dev: false /yargs-parser@21.1.1: @@ -2654,22 +3063,24 @@ packages: engines: {node: '>=10'} dev: true - /zx@7.0.8: - resolution: {integrity: sha512-sNjfDHzskqrSkWNj0TVhaowVK5AbpvuyuO1RBU4+LrFcgYI5u9CtyWWgUBRtRZl3bgGEF31zByszoBmwS47d1w==} + /zx@7.2.3: + resolution: {integrity: sha512-QODu38nLlYXg/B/Gw7ZKiZrvPkEsjPN3LQ5JFXM7h0JvwhEdPNNl+4Ao1y4+o3CLNiDUNcwzQYZ4/Ko7kKzCMA==} engines: {node: '>= 16.0.0'} hasBin: true dependencies: - '@types/fs-extra': 9.0.13 + '@types/fs-extra': 11.0.4 '@types/minimist': 1.2.2 - '@types/node': 18.8.2 + '@types/node': 18.19.31 '@types/ps-tree': 1.1.2 - '@types/which': 2.0.1 - chalk: 5.0.1 - fs-extra: 10.1.0 - globby: 13.1.2 - minimist: 1.2.6 - node-fetch: 3.2.8 + '@types/which': 3.0.3 + chalk: 5.3.0 + fs-extra: 11.2.0 + fx: 34.0.0 + globby: 13.2.2 + minimist: 1.2.8 + node-fetch: 3.3.1 ps-tree: 1.2.0 - which: 2.0.2 - yaml: 2.1.2 + webpod: 0.0.2 + which: 3.0.1 + yaml: 2.4.1 dev: false diff --git a/contracts/pnpm-lock.yaml b/contracts/pnpm-lock.yaml index ec23afcc564..65e8bfc878e 100644 --- a/contracts/pnpm-lock.yaml +++ b/contracts/pnpm-lock.yaml @@ -764,6 +764,11 @@ packages: '@ethersproject/properties': 5.7.0 '@ethersproject/strings': 5.7.0 + /@fastify/busboy@2.1.1: + resolution: {integrity: sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==} + engines: {node: '>=14'} + dev: true + /@humanwhocodes/config-array@0.11.14: resolution: {integrity: sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==} engines: {node: '>=10.10.0'} @@ -1088,7 +1093,7 @@ packages: lodash.clonedeep: 4.5.0 semver: 6.3.0 table: 6.8.1 - undici: 5.19.1 + undici: 5.28.4 transitivePeerDependencies: - supports-color dev: true @@ -2067,13 +2072,6 @@ packages: engines: {node: '>=8.0.0'} dev: false - /busboy@1.6.0: - resolution: {integrity: 
sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==} - engines: {node: '>=10.16.0'} - dependencies: - streamsearch: 1.1.0 - dev: true - /bytes@3.1.2: resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} engines: {node: '>= 0.8'} @@ -3105,8 +3103,8 @@ packages: resolution: {integrity: sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==} dev: true - /follow-redirects@1.15.2(debug@4.3.4): - resolution: {integrity: sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==} + /follow-redirects@1.15.6(debug@4.3.4): + resolution: {integrity: sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==} engines: {node: '>=4.0'} peerDependencies: debug: '*' @@ -3448,7 +3446,7 @@ packages: ts-node: 10.9.2(@types/node@16.18.91)(typescript@5.4.3) tsort: 0.0.1 typescript: 5.4.3 - undici: 5.19.1 + undici: 5.28.4 uuid: 8.3.2 ws: 7.5.9 transitivePeerDependencies: @@ -5061,7 +5059,7 @@ packages: dependencies: command-exists: 1.2.9 commander: 3.0.2 - follow-redirects: 1.15.2(debug@4.3.4) + follow-redirects: 1.15.6(debug@4.3.4) fs-extra: 0.30.0 js-sha3: 0.8.0 memorystream: 0.3.1 @@ -5289,11 +5287,6 @@ packages: mixme: 0.5.10 dev: false - /streamsearch@1.1.0: - resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==} - engines: {node: '>=10.0.0'} - dev: true - /string-format@2.0.0: resolution: {integrity: sha512-bbEs3scLeYNXLecRRuk6uJxdXUSj6le/8rNPHChIJTn2V79aXVTR1EH2OH5zLKKoz0V02fOUKZZcw01pLUShZA==} dev: true @@ -5691,11 +5684,11 @@ packages: which-boxed-primitive: 1.0.2 dev: false - /undici@5.19.1: - resolution: {integrity: sha512-YiZ61LPIgY73E7syxCDxxa3LV2yl3sN8spnIuTct60boiiRaE1J8mNWHO8Im2Zi/sFrPusjLlmRPrsyraSqX6A==} - engines: {node: '>=12.18'} + /undici@5.28.4: + resolution: {integrity: 
sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==} + engines: {node: '>=14.0'} dependencies: - busboy: 1.6.0 + '@fastify/busboy': 2.1.1 dev: true /universalify@0.1.2: From b3aab74aed0dcc958277794d1ba94dc0d051dbfe Mon Sep 17 00:00:00 2001 From: Tate Date: Tue, 9 Apr 2024 07:39:36 -0600 Subject: [PATCH 06/10] Fix nix mac issues building core missing newer apple sdk libs (#12746) --- flake.nix | 7 ++++++- nix.conf | 1 + shell.nix | 3 ++- 3 files changed, 9 insertions(+), 2 deletions(-) create mode 100644 nix.conf diff --git a/flake.nix b/flake.nix index e11e8eac799..928b165ca35 100644 --- a/flake.nix +++ b/flake.nix @@ -10,9 +10,14 @@ flake-utils.lib.eachDefaultSystem (system: let pkgs = import nixpkgs { inherit system; overlays = [ ]; }; + # The current default sdk for macOS fails to compile go projects, so we use a newer one for now. + # This has no effect on other platforms. + callPackage = pkgs.darwin.apple_sdk_11_0.callPackage or pkgs.callPackage; in rec { - devShell = pkgs.callPackage ./shell.nix { }; + devShell = callPackage ./shell.nix { + inherit pkgs; + }; formatter = pkgs.nixpkgs-fmt; }); } diff --git a/nix.conf b/nix.conf new file mode 100644 index 00000000000..c7d7291eb02 --- /dev/null +++ b/nix.conf @@ -0,0 +1 @@ +experimental-features = nix-command flakes diff --git a/shell.nix b/shell.nix index bc581f8e85a..7b64b7f58a1 100644 --- a/shell.nix +++ b/shell.nix @@ -1,4 +1,4 @@ -{ pkgs ? 
import { } }: +{ pkgs }: with pkgs; let go = go_1_21; @@ -46,6 +46,7 @@ mkShell { ]; LD_LIBRARY_PATH = "${stdenv.cc.cc.lib}/lib64:$LD_LIBRARY_PATH"; GOROOT = "${go}/share/go"; + CGO_ENABLED = 0; PGDATA = "db"; CL_DATABASE_URL = "postgresql://chainlink:chainlink@localhost:5432/chainlink_test?sslmode=disable"; From 17d56b83e2de1a40f2ecc3380b347d967ab7c134 Mon Sep 17 00:00:00 2001 From: Vyzaldy Sanchez Date: Tue, 9 Apr 2024 11:41:21 -0400 Subject: [PATCH 07/10] Add support for dynamic pipeline runs with persistence (#12367) * Adds new `job_pipeline_spec` relationship * Adds changeset * Fixes migration file name * Fixes migration * Removes primary key constraint * Restores primary key + adds unique index * Fixes migration * Adds a `job_pipeline_specs` record per job being created * Adds `pruning_key` to `pipeline_runs` * Stores `pipeline_runs` records with a JobID * Fixes migration number * Prunes `pipeline_runs` by `pruning_key` instead of `pipeline_spec_id` * Uses `ExecuteAndInsertFinishedRun` on `pipeline_runner_adapter` * Deletes `job_pipeline_specs` related to `jobs` when deleting a job * Traverses `job_pipeline_specs` relation when fetching a job * Fixes linter * Fixes migration number * Fixes migration * Keeps track of JobID after job insertion - fix * Fixes `loadJobPipelineSpec` * Fixes txmgr tests * Fixes pointer error when loading the job spec * Fixes query issue when inserting runs * Fixes `loadAssociations` query method * Fixes tests WIP * Fixes tests WIP * Fixes tests WIP * Fixes lint * Fixes tests WIP * Increases test coverage * Fixes `InsertJob` relationship query * Removes unnecessary method on pipeline runner * Removes some comments from migration file * Adds missing condition on `findJob` query * Improves pipeline orm test * Reverts unnecessary change * Adds pipeline test ORM * Fixes lint * Fixes migrations out of merge conflict * Fixes test out of merge conflict * Fixes lint --- .changeset/famous-pets-confess.md | 5 + 
core/chains/evm/txmgr/broadcaster_test.go | 1 + core/chains/evm/txmgr/confirmer_test.go | 2 + core/chains/evm/txmgr/evm_tx_store_test.go | 4 +- core/internal/cltest/factories.go | 13 +- core/services/fluxmonitorv2/orm_test.go | 1 + core/services/job/job_orm_test.go | 22 +- .../job/job_pipeline_orm_integration_test.go | 3 +- core/services/job/models.go | 11 +- core/services/job/orm.go | 95 ++++++-- core/services/job/runner_integration_test.go | 26 ++- core/services/keeper/upkeep_executer_test.go | 7 +- .../generic/pipeline_runner_adapter.go | 5 +- .../generic/pipeline_runner_adapter_test.go | 30 ++- core/services/pipeline/mocks/orm.go | 25 ++ core/services/pipeline/mocks/runner.go | 12 +- core/services/pipeline/models.go | 3 + core/services/pipeline/orm.go | 137 +++++++---- core/services/pipeline/orm_test.go | 215 +++++++++++++++--- core/services/pipeline/runner.go | 26 ++- core/services/pipeline/runner_test.go | 47 +++- .../migrations/0231_dynamic_pipeline_runs.sql | 48 ++++ 22 files changed, 587 insertions(+), 151 deletions(-) create mode 100644 .changeset/famous-pets-confess.md create mode 100644 core/store/migrate/migrations/0231_dynamic_pipeline_runs.sql diff --git a/.changeset/famous-pets-confess.md b/.changeset/famous-pets-confess.md new file mode 100644 index 00000000000..583b17a1603 --- /dev/null +++ b/.changeset/famous-pets-confess.md @@ -0,0 +1,5 @@ +--- +"chainlink": minor +--- + +Provides support for dynamic pipeline runs with persistence. 
diff --git a/core/chains/evm/txmgr/broadcaster_test.go b/core/chains/evm/txmgr/broadcaster_test.go index d9e9364fdf0..1e8f1c73b34 100644 --- a/core/chains/evm/txmgr/broadcaster_test.go +++ b/core/chains/evm/txmgr/broadcaster_test.go @@ -1025,6 +1025,7 @@ func TestEthBroadcaster_ProcessUnstartedEthTxs_Errors(t *testing.T) { eb := NewTestEthBroadcaster(t, txStore, ethClient, ethKeyStore, evmcfg, &testCheckerFactory{}, false, nonceTracker) ctx := testutils.Context(t) + require.NoError(t, commonutils.JustError(db.Exec(`SET CONSTRAINTS fk_pipeline_runs_pruning_key DEFERRED`))) require.NoError(t, commonutils.JustError(db.Exec(`SET CONSTRAINTS pipeline_runs_pipeline_spec_id_fkey DEFERRED`))) t.Run("if external wallet sent a transaction from the account and now the nonce is one higher than it should be and we got replacement underpriced then we assume a previous transaction of ours was the one that succeeded, and hand off to EthConfirmer", func(t *testing.T) { diff --git a/core/chains/evm/txmgr/confirmer_test.go b/core/chains/evm/txmgr/confirmer_test.go index 3e200d66818..80868d448e0 100644 --- a/core/chains/evm/txmgr/confirmer_test.go +++ b/core/chains/evm/txmgr/confirmer_test.go @@ -22,6 +22,7 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink-common/pkg/services/servicetest" + commonclient "github.com/smartcontractkit/chainlink/v2/common/client" commonfee "github.com/smartcontractkit/chainlink/v2/common/fee" txmgrcommon "github.com/smartcontractkit/chainlink/v2/common/txmgr" @@ -2961,6 +2962,7 @@ func TestEthConfirmer_ResumePendingRuns(t *testing.T) { minConfirmations := int64(2) + pgtest.MustExec(t, db, `SET CONSTRAINTS fk_pipeline_runs_pruning_key DEFERRED`) pgtest.MustExec(t, db, `SET CONSTRAINTS pipeline_runs_pipeline_spec_id_fkey DEFERRED`) t.Run("doesn't process task runs that are not suspended (possibly already previously resumed)", func(t *testing.T) { diff --git 
a/core/chains/evm/txmgr/evm_tx_store_test.go b/core/chains/evm/txmgr/evm_tx_store_test.go index 4679ffd3339..5bb131862ed 100644 --- a/core/chains/evm/txmgr/evm_tx_store_test.go +++ b/core/chains/evm/txmgr/evm_tx_store_test.go @@ -10,6 +10,7 @@ import ( commonconfig "github.com/smartcontractkit/chainlink-common/pkg/config" "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink-common/pkg/sqlutil" + txmgrcommon "github.com/smartcontractkit/chainlink/v2/common/txmgr" txmgrtypes "github.com/smartcontractkit/chainlink/v2/common/txmgr/types" "github.com/smartcontractkit/chainlink/v2/core/chains/evm/assets" @@ -636,6 +637,7 @@ func TestORM_FindTxesPendingCallback(t *testing.T) { ethClient := evmtest.NewEthClientMockWithDefaultChain(t) _, fromAddress := cltest.MustInsertRandomKeyReturningState(t, ethKeyStore) + pgtest.MustExec(t, db, `SET CONSTRAINTS fk_pipeline_runs_pruning_key DEFERRED`) pgtest.MustExec(t, db, `SET CONSTRAINTS pipeline_runs_pipeline_spec_id_fkey DEFERRED`) head := evmtypes.Head{ @@ -665,7 +667,7 @@ func TestORM_FindTxesPendingCallback(t *testing.T) { pgtest.MustExec(t, db, `UPDATE evm.txes SET pipeline_task_run_id = $1, min_confirmations = $2, signal_callback = TRUE WHERE id = $3`, &tr1.ID, minConfirmations, etx1.ID) // Callback to pipeline service completed. 
Should be ignored - run2 := cltest.MustInsertPipelineRunWithStatus(t, db, 0, pipeline.RunStatusCompleted) + run2 := cltest.MustInsertPipelineRunWithStatus(t, db, 0, pipeline.RunStatusCompleted, 0) tr2 := cltest.MustInsertUnfinishedPipelineTaskRun(t, db, run2.ID) etx2 := cltest.MustInsertConfirmedEthTxWithLegacyAttempt(t, txStore, 4, 1, fromAddress) pgtest.MustExec(t, db, `UPDATE evm.txes SET meta='{"FailOnRevert": false}'`) diff --git a/core/internal/cltest/factories.go b/core/internal/cltest/factories.go index 66c96c231e7..2ca7b4947c5 100644 --- a/core/internal/cltest/factories.go +++ b/core/internal/cltest/factories.go @@ -407,6 +407,7 @@ func MustInsertKeeperJob(t *testing.T, db *sqlx.DB, korm keeper.ORM, from evmtyp jrm := job.NewORM(db, prm, btORM, nil, tlg, cfg.Database()) err = jrm.InsertJob(&jb) require.NoError(t, err) + jb.PipelineSpec.JobID = jb.ID return jb } @@ -415,13 +416,13 @@ func MustInsertKeeperRegistry(t *testing.T, db *sqlx.DB, korm keeper.ORM, ethKey from := key.EIP55Address t.Helper() contractAddress := NewEIP55Address() - job := MustInsertKeeperJob(t, db, korm, from, contractAddress) + jb := MustInsertKeeperJob(t, db, korm, from, contractAddress) registry := keeper.Registry{ ContractAddress: contractAddress, BlockCountPerTurn: blockCountPerTurn, CheckGas: 150_000, FromAddress: from, - JobID: job.ID, + JobID: jb.ID, KeeperIndex: keeperIndex, NumKeepers: numKeepers, KeeperIndexMap: map[evmtypes.EIP55Address]int32{ @@ -430,7 +431,7 @@ func MustInsertKeeperRegistry(t *testing.T, db *sqlx.DB, korm keeper.ORM, ethKey } err := korm.UpsertRegistry(®istry) require.NoError(t, err) - return registry, job + return registry, jb } func MustInsertUpkeepForRegistry(t *testing.T, db *sqlx.DB, cfg pg.QConfig, registry keeper.Registry) keeper.UpkeepRegistration { @@ -452,11 +453,11 @@ func MustInsertUpkeepForRegistry(t *testing.T, db *sqlx.DB, cfg pg.QConfig, regi } func MustInsertPipelineRun(t *testing.T, db *sqlx.DB) (run pipeline.Run) { - require.NoError(t, 
db.Get(&run, `INSERT INTO pipeline_runs (state,pipeline_spec_id,created_at) VALUES ($1, 0, NOW()) RETURNING *`, pipeline.RunStatusRunning)) + require.NoError(t, db.Get(&run, `INSERT INTO pipeline_runs (state,pipeline_spec_id,pruning_key,created_at) VALUES ($1, 0, 0, NOW()) RETURNING *`, pipeline.RunStatusRunning)) return run } -func MustInsertPipelineRunWithStatus(t *testing.T, db *sqlx.DB, pipelineSpecID int32, status pipeline.RunStatus) (run pipeline.Run) { +func MustInsertPipelineRunWithStatus(t *testing.T, db *sqlx.DB, pipelineSpecID int32, status pipeline.RunStatus, jobID int32) (run pipeline.Run) { var finishedAt *time.Time var outputs jsonserializable.JSONSerializable var allErrors pipeline.RunErrors @@ -478,7 +479,7 @@ func MustInsertPipelineRunWithStatus(t *testing.T, db *sqlx.DB, pipelineSpecID i default: t.Fatalf("unknown status: %s", status) } - require.NoError(t, db.Get(&run, `INSERT INTO pipeline_runs (state,pipeline_spec_id,finished_at,outputs,all_errors,fatal_errors,created_at) VALUES ($1, $2, $3, $4, $5, $6, NOW()) RETURNING *`, status, pipelineSpecID, finishedAt, outputs, allErrors, fatalErrors)) + require.NoError(t, db.Get(&run, `INSERT INTO pipeline_runs (state,pipeline_spec_id,pruning_key,finished_at,outputs,all_errors,fatal_errors,created_at) VALUES ($1, $2, $3, $4, $5, $6, $7, NOW()) RETURNING *`, status, pipelineSpecID, jobID, finishedAt, outputs, allErrors, fatalErrors)) return run } diff --git a/core/services/fluxmonitorv2/orm_test.go b/core/services/fluxmonitorv2/orm_test.go index 21a80735863..9f85a99b6ea 100644 --- a/core/services/fluxmonitorv2/orm_test.go +++ b/core/services/fluxmonitorv2/orm_test.go @@ -112,6 +112,7 @@ func TestORM_UpdateFluxMonitorRoundStats(t *testing.T) { &pipeline.Run{ State: pipeline.RunStatusCompleted, PipelineSpecID: jb.PipelineSpec.ID, + PruningKey: jb.ID, PipelineSpec: *jb.PipelineSpec, CreatedAt: time.Now(), FinishedAt: null.TimeFrom(f), diff --git a/core/services/job/job_orm_test.go 
b/core/services/job/job_orm_test.go index d763386a00d..1e714da5908 100644 --- a/core/services/job/job_orm_test.go +++ b/core/services/job/job_orm_test.go @@ -108,6 +108,23 @@ func TestORM(t *testing.T) { compareOCRJobSpecs(t, *jb, returnedSpec) }) + t.Run("it correctly mark job_pipeline_specs as primary when creating a job", func(t *testing.T) { + jb2 := makeOCRJobSpec(t, address, bridge.Name.String(), bridge2.Name.String()) + err := orm.CreateJob(jb2) + require.NoError(t, err) + + var pipelineSpec pipeline.Spec + err = db.Get(&pipelineSpec, "SELECT pipeline_specs.* FROM pipeline_specs JOIN job_pipeline_specs ON (pipeline_specs.id = job_pipeline_specs.pipeline_spec_id) WHERE job_pipeline_specs.job_id = $1", jb2.ID) + require.NoError(t, err) + var jobPipelineSpec job.PipelineSpec + err = db.Get(&jobPipelineSpec, "SELECT * FROM job_pipeline_specs WHERE job_id = $1 AND pipeline_spec_id = $2", jb2.ID, pipelineSpec.ID) + require.NoError(t, err) + + // `jb2.PipelineSpecID` gets loaded when calling `orm.CreateJob()` so we can compare it directly + assert.Equal(t, jb2.PipelineSpecID, pipelineSpec.ID) + assert.True(t, jobPipelineSpec.IsPrimary) + }) + t.Run("autogenerates external job ID if missing", func(t *testing.T) { jb2 := makeOCRJobSpec(t, address, bridge.Name.String(), bridge2.Name.String()) jb2.ExternalJobID = uuid.UUID{} @@ -126,7 +143,7 @@ func TestORM(t *testing.T) { err := db.Select(&dbSpecs, "SELECT * FROM jobs") require.NoError(t, err) - require.Len(t, dbSpecs, 2) + require.Len(t, dbSpecs, 3) err = orm.DeleteJob(jb.ID) require.NoError(t, err) @@ -134,7 +151,7 @@ func TestORM(t *testing.T) { dbSpecs = []job.Job{} err = db.Select(&dbSpecs, "SELECT * FROM jobs") require.NoError(t, err) - require.Len(t, dbSpecs, 1) + require.Len(t, dbSpecs, 2) }) t.Run("increase job spec error occurrence", func(t *testing.T) { @@ -1729,6 +1746,7 @@ func mustInsertPipelineRun(t *testing.T, orm pipeline.ORM, j job.Job) pipeline.R run := pipeline.Run{ PipelineSpecID: 
j.PipelineSpecID, + PruningKey: j.ID, State: pipeline.RunStatusRunning, Outputs: jsonserializable.JSONSerializable{Valid: false}, AllErrors: pipeline.RunErrors{}, diff --git a/core/services/job/job_pipeline_orm_integration_test.go b/core/services/job/job_pipeline_orm_integration_test.go index dd3062fa14b..c7842e1b160 100644 --- a/core/services/job/job_pipeline_orm_integration_test.go +++ b/core/services/job/job_pipeline_orm_integration_test.go @@ -10,6 +10,7 @@ import ( "github.com/jmoiron/sqlx" commonconfig "github.com/smartcontractkit/chainlink-common/pkg/config" + "github.com/smartcontractkit/chainlink/v2/core/bridges" "github.com/smartcontractkit/chainlink/v2/core/internal/cltest" "github.com/smartcontractkit/chainlink/v2/core/internal/testutils" @@ -165,7 +166,7 @@ func TestPipelineORM_Integration(t *testing.T) { require.NoError(t, jobORM.CreateJob(dbSpec)) var pipelineSpecs []pipeline.Spec - sql := `SELECT * FROM pipeline_specs;` + sql := `SELECT pipeline_specs.*, job_pipeline_specs.job_id FROM pipeline_specs JOIN job_pipeline_specs ON (pipeline_specs.id = job_pipeline_specs.pipeline_spec_id);` require.NoError(t, db.Select(&pipelineSpecs, sql)) require.Len(t, pipelineSpecs, 1) require.Equal(t, dbSpec.PipelineSpecID, pipelineSpecs[0].ID) diff --git a/core/services/job/models.go b/core/services/job/models.go index 218be21bc54..4d75c7d90d2 100644 --- a/core/services/job/models.go +++ b/core/services/job/models.go @@ -163,7 +163,7 @@ type Job struct { EALSpecID *int32 LiquidityBalancerSpec *LiquidityBalancerSpec LiquidityBalancerSpecID *int32 - PipelineSpecID int32 + PipelineSpecID int32 // This is deprecated in favor of the `job_pipeline_specs` table relationship PipelineSpec *pipeline.Spec JobSpecErrors []SpecError Type Type `toml:"type"` @@ -208,6 +208,12 @@ func (j *Job) SetID(value string) error { return nil } +type PipelineSpec struct { + JobID int32 `json:"-"` + PipelineSpecID int32 `json:"-"` + IsPrimary bool `json:"is_primary"` +} + type SpecError struct 
{ ID int64 JobID int32 @@ -229,7 +235,8 @@ func (j *SpecError) SetID(value string) error { } type PipelineRun struct { - ID int64 `json:"-"` + ID int64 `json:"-"` + PruningKey int64 `json:"-"` } func (pr PipelineRun) GetID() string { diff --git a/core/services/job/orm.go b/core/services/job/orm.go index 6c8533d1dee..c05e944ea1e 100644 --- a/core/services/job/orm.go +++ b/core/services/job/orm.go @@ -534,41 +534,51 @@ func (o *orm) InsertWebhookSpec(webhookSpec *WebhookSpec, qopts ...pg.QOpt) erro func (o *orm) InsertJob(job *Job, qopts ...pg.QOpt) error { q := o.q.WithOpts(qopts...) - var query string + return q.Transaction(func(querier pg.Queryer) error { + var query string - // if job has id, emplace otherwise insert with a new id. - if job.ID == 0 { - query = `INSERT INTO jobs (pipeline_spec_id, name, stream_id, schema_version, type, max_task_duration, ocr_oracle_spec_id, ocr2_oracle_spec_id, direct_request_spec_id, flux_monitor_spec_id, + // if job has id, emplace otherwise insert with a new id. 
+ if job.ID == 0 { + query = `INSERT INTO jobs (name, stream_id, schema_version, type, max_task_duration, ocr_oracle_spec_id, ocr2_oracle_spec_id, direct_request_spec_id, flux_monitor_spec_id, keeper_spec_id, cron_spec_id, vrf_spec_id, webhook_spec_id, blockhash_store_spec_id, bootstrap_spec_id, block_header_feeder_spec_id, gateway_spec_id, legacy_gas_station_server_spec_id, legacy_gas_station_sidecar_spec_id, external_job_id, gas_limit, forwarding_allowed, created_at) - VALUES (:pipeline_spec_id, :name, :stream_id, :schema_version, :type, :max_task_duration, :ocr_oracle_spec_id, :ocr2_oracle_spec_id, :direct_request_spec_id, :flux_monitor_spec_id, + VALUES (:name, :stream_id, :schema_version, :type, :max_task_duration, :ocr_oracle_spec_id, :ocr2_oracle_spec_id, :direct_request_spec_id, :flux_monitor_spec_id, :keeper_spec_id, :cron_spec_id, :vrf_spec_id, :webhook_spec_id, :blockhash_store_spec_id, :bootstrap_spec_id, :block_header_feeder_spec_id, :gateway_spec_id, :legacy_gas_station_server_spec_id, :legacy_gas_station_sidecar_spec_id, :external_job_id, :gas_limit, :forwarding_allowed, NOW()) RETURNING *;` - } else { - query = `INSERT INTO jobs (id, pipeline_spec_id, name, stream_id, schema_version, type, max_task_duration, ocr_oracle_spec_id, ocr2_oracle_spec_id, direct_request_spec_id, flux_monitor_spec_id, + } else { + query = `INSERT INTO jobs (id, name, stream_id, schema_version, type, max_task_duration, ocr_oracle_spec_id, ocr2_oracle_spec_id, direct_request_spec_id, flux_monitor_spec_id, keeper_spec_id, cron_spec_id, vrf_spec_id, webhook_spec_id, blockhash_store_spec_id, bootstrap_spec_id, block_header_feeder_spec_id, gateway_spec_id, legacy_gas_station_server_spec_id, legacy_gas_station_sidecar_spec_id, external_job_id, gas_limit, forwarding_allowed, created_at) - VALUES (:id, :pipeline_spec_id, :name, :stream_id, :schema_version, :type, :max_task_duration, :ocr_oracle_spec_id, :ocr2_oracle_spec_id, :direct_request_spec_id, :flux_monitor_spec_id, + VALUES 
(:id, :name, :stream_id, :schema_version, :type, :max_task_duration, :ocr_oracle_spec_id, :ocr2_oracle_spec_id, :direct_request_spec_id, :flux_monitor_spec_id, :keeper_spec_id, :cron_spec_id, :vrf_spec_id, :webhook_spec_id, :blockhash_store_spec_id, :bootstrap_spec_id, :block_header_feeder_spec_id, :gateway_spec_id, :legacy_gas_station_server_spec_id, :legacy_gas_station_sidecar_spec_id, :external_job_id, :gas_limit, :forwarding_allowed, NOW()) RETURNING *;` - } - return q.GetNamed(query, job, job) + } + err := q.GetNamed(query, job, job) + if err != nil { + return err + } + + // Always inserts the `job_pipeline_specs` record as primary, since this is the first one for the job. + sqlStmt := `INSERT INTO job_pipeline_specs (job_id, pipeline_spec_id, is_primary) VALUES ($1, $2, true)` + _, err = q.Exec(sqlStmt, job.ID, job.PipelineSpecID) + return errors.Wrap(err, "failed to insert job_pipeline_specs relationship") + }) } // DeleteJob removes a job func (o *orm) DeleteJob(id int32, qopts ...pg.QOpt) error { o.lggr.Debugw("Deleting job", "jobID", id) - // Added a 1 minute timeout to this query since this can take a long time as data increases. - // This was added specifically due to an issue with a database that had a millions of pipeline_runs and pipeline_task_runs + // Added a 1-minute timeout to this query since this can take a long time as data increases. + // This was added specifically due to an issue with a database that had a million of pipeline_runs and pipeline_task_runs // and this query was taking ~40secs. qopts = append(qopts, pg.WithLongQueryTimeout()) q := o.q.WithOpts(qopts...) 
query := ` WITH deleted_jobs AS ( DELETE FROM jobs WHERE id = $1 RETURNING - pipeline_spec_id, + id, ocr_oracle_spec_id, ocr2_oracle_spec_id, keeper_spec_id, @@ -617,8 +627,11 @@ func (o *orm) DeleteJob(id int32, qopts ...pg.QOpt) error { ), deleted_gateway_specs AS ( DELETE FROM gateway_specs WHERE id IN (SELECT gateway_spec_id FROM deleted_jobs) + ), + deleted_job_pipeline_specs AS ( + DELETE FROM job_pipeline_specs WHERE job_id IN (SELECT id FROM deleted_jobs) RETURNING pipeline_spec_id ) - DELETE FROM pipeline_specs WHERE id IN (SELECT pipeline_spec_id FROM deleted_jobs)` + DELETE FROM pipeline_specs WHERE id IN (SELECT pipeline_spec_id FROM deleted_job_pipeline_specs)` res, cancel, err := q.ExecQIter(query, id) defer cancel() if err != nil { @@ -692,7 +705,10 @@ func (o *orm) FindJobs(offset, limit int) (jobs []Job, count int, err error) { return err } - sql = `SELECT * FROM jobs ORDER BY created_at DESC, id DESC OFFSET $1 LIMIT $2;` + sql = `SELECT jobs.*, job_pipeline_specs.pipeline_spec_id as pipeline_spec_id + FROM jobs + JOIN job_pipeline_specs ON (jobs.id = job_pipeline_specs.job_id) + ORDER BY jobs.created_at DESC, jobs.id DESC OFFSET $1 LIMIT $2;` err = tx.Select(&jobs, sql, offset, limit) if err != nil { return err @@ -807,7 +823,7 @@ func (o *orm) FindJob(ctx context.Context, id int32) (jb Job, err error) { // FindJobWithoutSpecErrors returns a job by ID, without loading Spec Errors preloaded func (o *orm) FindJobWithoutSpecErrors(id int32) (jb Job, err error) { err = o.q.Transaction(func(tx pg.Queryer) error { - stmt := "SELECT * FROM jobs WHERE id = $1 LIMIT 1" + stmt := "SELECT jobs.*, job_pipeline_specs.pipeline_spec_id as pipeline_spec_id FROM jobs JOIN job_pipeline_specs ON (jobs.id = job_pipeline_specs.job_id) WHERE jobs.id = $1 LIMIT 1" err = tx.Get(&jb, stmt, id) if err != nil { return errors.Wrap(err, "failed to load job") @@ -897,7 +913,7 @@ WHERE ocr2spec.id IS NOT NULL OR bs.id IS NOT NULL func (o *orm) findJob(jb *Job, col string, arg 
interface{}, qopts ...pg.QOpt) error { q := o.q.WithOpts(qopts...) err := q.Transaction(func(tx pg.Queryer) error { - sql := fmt.Sprintf(`SELECT * FROM jobs WHERE %s = $1 LIMIT 1`, col) + sql := fmt.Sprintf(`SELECT jobs.*, job_pipeline_specs.pipeline_spec_id FROM jobs JOIN job_pipeline_specs ON (jobs.id = job_pipeline_specs.job_id) WHERE jobs.%s = $1 AND job_pipeline_specs.is_primary = true LIMIT 1`, col) err := tx.Get(jb, sql, arg) if err != nil { return errors.Wrap(err, "failed to load job") @@ -917,7 +933,13 @@ func (o *orm) findJob(jb *Job, col string, arg interface{}, qopts ...pg.QOpt) er func (o *orm) FindJobIDsWithBridge(name string) (jids []int32, err error) { err = o.q.Transaction(func(tx pg.Queryer) error { - query := `SELECT jobs.id, dot_dag_source FROM jobs JOIN pipeline_specs ON pipeline_specs.id = jobs.pipeline_spec_id WHERE dot_dag_source ILIKE '%' || $1 || '%' ORDER BY id` + query := `SELECT + jobs.id, pipeline_specs.dot_dag_source + FROM jobs + JOIN job_pipeline_specs ON job_pipeline_specs.job_id = jobs.id + JOIN pipeline_specs ON pipeline_specs.id = job_pipeline_specs.pipeline_spec_id + WHERE pipeline_specs.dot_dag_source ILIKE '%' || $1 || '%' ORDER BY id` + var rows *sqlx.Rows rows, err = tx.Queryx(query, name) if err != nil { @@ -958,7 +980,7 @@ func (o *orm) FindJobIDsWithBridge(name string) (jids []int32, err error) { // PipelineRunsByJobsIDs returns pipeline runs for multiple jobs, not preloading data func (o *orm) PipelineRunsByJobsIDs(ids []int32) (runs []pipeline.Run, err error) { err = o.q.Transaction(func(tx pg.Queryer) error { - stmt := `SELECT pipeline_runs.* FROM pipeline_runs INNER JOIN jobs ON pipeline_runs.pipeline_spec_id = jobs.pipeline_spec_id WHERE jobs.id = ANY($1) + stmt := `SELECT pipeline_runs.* FROM pipeline_runs INNER JOIN job_pipeline_specs ON pipeline_runs.pipeline_spec_id = job_pipeline_specs.pipeline_spec_id WHERE jobs.id = ANY($1) ORDER BY pipeline_runs.created_at DESC, pipeline_runs.id DESC;` if err = 
tx.Select(&runs, stmt, ids); err != nil { return errors.Wrap(err, "error loading runs") @@ -987,7 +1009,7 @@ func (o *orm) loadPipelineRunIDs(jobID *int32, offset, limit int, tx pg.Queryer) var filter string if jobID != nil { - filter = fmt.Sprintf("JOIN jobs USING(pipeline_spec_id) WHERE jobs.id = %d AND ", *jobID) + filter = fmt.Sprintf("JOIN job_pipeline_specs USING(pipeline_spec_id) WHERE job_pipeline_specs.job_id = %d AND ", *jobID) } else { filter = "WHERE " } @@ -1132,7 +1154,7 @@ WHERE id = $1 // CountPipelineRunsByJobID returns the total number of pipeline runs for a job. func (o *orm) CountPipelineRunsByJobID(jobID int32) (count int32, err error) { err = o.q.Transaction(func(tx pg.Queryer) error { - stmt := "SELECT COUNT(*) FROM pipeline_runs JOIN jobs USING (pipeline_spec_id) WHERE jobs.id = $1" + stmt := "SELECT COUNT(*) FROM pipeline_runs JOIN job_pipeline_specs USING (pipeline_spec_id) WHERE job_pipeline_specs.job_id = $1" if err = tx.Get(&count, stmt, jobID); err != nil { return errors.Wrap(err, "error counting runs") } @@ -1147,7 +1169,7 @@ func (o *orm) FindJobsByPipelineSpecIDs(ids []int32) ([]Job, error) { var jbs []Job err := o.q.Transaction(func(tx pg.Queryer) error { - stmt := `SELECT * FROM jobs WHERE jobs.pipeline_spec_id = ANY($1) ORDER BY id ASC + stmt := `SELECT jobs.*, job_pipeline_specs.pipeline_spec_id FROM jobs JOIN job_pipeline_specs ON (jobs.id = job_pipeline_specs.job_id) WHERE job_pipeline_specs.pipeline_spec_id = ANY($1) ORDER BY jobs.id ASC ` if err := tx.Select(&jbs, stmt, ids); err != nil { return errors.Wrap(err, "error fetching jobs by pipeline spec IDs") @@ -1169,7 +1191,7 @@ func (o *orm) FindJobsByPipelineSpecIDs(ids []int32) ([]Job, error) { func (o *orm) PipelineRuns(jobID *int32, offset, size int) (runs []pipeline.Run, count int, err error) { var filter string if jobID != nil { - filter = fmt.Sprintf("JOIN jobs USING(pipeline_spec_id) WHERE jobs.id = %d", *jobID) + filter = fmt.Sprintf("JOIN job_pipeline_specs 
USING(pipeline_spec_id) WHERE job_pipeline_specs.job_id = %d", *jobID) } err = o.q.Transaction(func(tx pg.Queryer) error { sql := fmt.Sprintf(`SELECT count(*) FROM pipeline_runs %s`, filter) @@ -1200,7 +1222,7 @@ func (o *orm) loadPipelineRunsRelations(runs []pipeline.Run, tx pg.Queryer) ([]p for specID := range specM { specIDs = append(specIDs, specID) } - stmt := `SELECT pipeline_specs.*, jobs.id AS job_id FROM pipeline_specs JOIN jobs ON pipeline_specs.id = jobs.pipeline_spec_id WHERE pipeline_specs.id = ANY($1);` + stmt := `SELECT pipeline_specs.*, job_pipeline_specs.job_id AS job_id FROM pipeline_specs JOIN job_pipeline_specs ON pipeline_specs.id = job_pipeline_specs.pipeline_spec_id WHERE pipeline_specs.id = ANY($1);` var specs []pipeline.Spec if err := o.q.Select(&specs, stmt, specIDs); err != nil { return nil, errors.Wrap(err, "error loading specs") @@ -1247,7 +1269,7 @@ func LoadAllJobsTypes(tx pg.Queryer, jobs []Job) error { func LoadAllJobTypes(tx pg.Queryer, job *Job) error { return multierr.Combine( - loadJobType(tx, job, "PipelineSpec", "pipeline_specs", &job.PipelineSpecID), + loadJobPipelineSpec(tx, job, &job.PipelineSpecID), loadJobType(tx, job, "FluxMonitorSpec", "flux_monitor_specs", job.FluxMonitorSpecID), loadJobType(tx, job, "DirectRequestSpec", "direct_request_specs", job.DirectRequestSpecID), loadJobType(tx, job, "OCROracleSpec", "ocr_oracle_specs", job.OCROracleSpecID), @@ -1287,6 +1309,29 @@ func loadJobType(tx pg.Queryer, job *Job, field, table string, id *int32) error return nil } +func loadJobPipelineSpec(tx pg.Queryer, job *Job, id *int32) error { + if id == nil { + return nil + } + pipelineSpecRow := new(pipeline.Spec) + if job.PipelineSpec != nil { + pipelineSpecRow = job.PipelineSpec + } + err := tx.Get( + pipelineSpecRow, + `SELECT pipeline_specs.*, job_pipeline_specs.job_id as job_id + FROM pipeline_specs + JOIN job_pipeline_specs ON(pipeline_specs.id = job_pipeline_specs.pipeline_spec_id) + WHERE job_pipeline_specs.job_id = $1 
AND job_pipeline_specs.pipeline_spec_id = $2`, + job.ID, *id, + ) + if err != nil { + return errors.Wrapf(err, "failed to load job type PipelineSpec with id %d", *id) + } + job.PipelineSpec = pipelineSpecRow + return nil +} + func loadVRFJob(tx pg.Queryer, job *Job, id *int32) error { if id == nil { return nil diff --git a/core/services/job/runner_integration_test.go b/core/services/job/runner_integration_test.go index 3a1f69afa1b..110d4a41a91 100644 --- a/core/services/job/runner_integration_test.go +++ b/core/services/job/runner_integration_test.go @@ -126,9 +126,10 @@ func TestRunner(t *testing.T) { m, err := bridges.MarshalBridgeMetaData(big.NewInt(10), big.NewInt(100)) require.NoError(t, err) - runID, results, err := runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(map[string]interface{}{"jobRun": map[string]interface{}{"meta": m}}), logger.TestLogger(t), true) + runID, taskResults, err := runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(map[string]interface{}{"jobRun": map[string]interface{}{"meta": m}}), logger.TestLogger(t), true) require.NoError(t, err) + results := taskResults.FinalResult(logger.TestLogger(t)) require.Len(t, results.Values, 2) require.GreaterOrEqual(t, len(results.FatalErrors), 2) assert.Nil(t, results.FatalErrors[0]) @@ -313,9 +314,10 @@ answer1 [type=median index=0]; err := jobORM.CreateJob(jb) require.NoError(t, err) - runID, results, err := runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(nil), logger.TestLogger(t), true) + runID, taskResults, err := runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(nil), logger.TestLogger(t), true) require.NoError(t, err) + results := taskResults.FinalResult(logger.TestLogger(t)) assert.Len(t, results.FatalErrors, 1) assert.Len(t, results.Values, 1) assert.Contains(t, results.FatalErrors[0].Error(), "type cannot be 
converted to decimal.Decimal") @@ -358,9 +360,10 @@ answer1 [type=median index=0]; err := jobORM.CreateJob(jb) require.NoError(t, err) - runID, results, err := runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(nil), logger.TestLogger(t), true) + runID, taskResults, err := runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(nil), logger.TestLogger(t), true) require.NoError(t, err) + results := taskResults.FinalResult(logger.TestLogger(t)) assert.Len(t, results.Values, 1) assert.Len(t, results.FatalErrors, 1) assert.Contains(t, results.FatalErrors[0].Error(), pipeline.ErrTooManyErrors.Error()) @@ -402,9 +405,10 @@ answer1 [type=median index=0]; err := jobORM.CreateJob(jb) require.NoError(t, err) - runID, results, err := runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(nil), logger.TestLogger(t), true) + runID, taskResults, err := runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(nil), logger.TestLogger(t), true) require.NoError(t, err) + results := taskResults.FinalResult(logger.TestLogger(t)) assert.Len(t, results.Values, 1) assert.Contains(t, results.FatalErrors[0].Error(), "type cannot be converted to decimal.Decimal") assert.Nil(t, results.Values[0]) @@ -685,8 +689,9 @@ answer1 [type=median index=0]; err := jobORM.CreateJob(jb) require.NoError(t, err) - _, results, err := runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(nil), logger.TestLogger(t), true) + _, taskResults, err := runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(nil), logger.TestLogger(t), true) require.NoError(t, err) + results := taskResults.FinalResult(logger.TestLogger(t)) assert.Nil(t, results.Values[0]) // No task timeout should succeed. 
@@ -694,8 +699,9 @@ answer1 [type=median index=0]; jb.Name = null.NewString("a job 2", true) err = jobORM.CreateJob(jb) require.NoError(t, err) - _, results, err = runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(nil), logger.TestLogger(t), true) + _, taskResults, err = runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(nil), logger.TestLogger(t), true) require.NoError(t, err) + results = taskResults.FinalResult(logger.TestLogger(t)) assert.Equal(t, 10.1, results.Values[0]) assert.Nil(t, results.FatalErrors[0]) @@ -706,9 +712,10 @@ answer1 [type=median index=0]; err = jobORM.CreateJob(jb) require.NoError(t, err) - _, results, err = runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(nil), logger.TestLogger(t), true) + _, taskResults, err = runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(nil), logger.TestLogger(t), true) require.NoError(t, err) - assert.NotNil(t, results.FatalErrors[0]) + resultsNoFatalErrs := taskResults.FinalResult(logger.TestLogger(t)) + assert.NotNil(t, resultsNoFatalErrs.FatalErrors[0]) }) t.Run("deleting jobs", func(t *testing.T) { @@ -724,8 +731,9 @@ answer1 [type=median index=0]; err := jobORM.CreateJob(jb) require.NoError(t, err) - _, results, err := runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(nil), logger.TestLogger(t), true) + _, taskResults, err := runner.ExecuteAndInsertFinishedRun(testutils.Context(t), *jb.PipelineSpec, pipeline.NewVarsFrom(nil), logger.TestLogger(t), true) require.NoError(t, err) + results := taskResults.FinalResult(logger.TestLogger(t)) assert.Len(t, results.Values, 1) assert.Nil(t, results.FatalErrors[0]) assert.Equal(t, "4242", results.Values[0].(decimal.Decimal).String()) diff --git a/core/services/keeper/upkeep_executer_test.go b/core/services/keeper/upkeep_executer_test.go index 
8299f47c853..fbe61f35743 100644 --- a/core/services/keeper/upkeep_executer_test.go +++ b/core/services/keeper/upkeep_executer_test.go @@ -16,6 +16,7 @@ import ( "github.com/jmoiron/sqlx" "github.com/smartcontractkit/chainlink-common/pkg/services/servicetest" + "github.com/smartcontractkit/chainlink/v2/core/chains/evm/assets" evmclimocks "github.com/smartcontractkit/chainlink/v2/core/chains/evm/client/mocks" "github.com/smartcontractkit/chainlink/v2/core/chains/evm/gas" @@ -85,13 +86,13 @@ func setup(t *testing.T, estimator gas.EvmFeeEstimator, overrideFn func(c *chain jpv2 := cltest.NewJobPipelineV2(t, cfg.WebServer(), cfg.JobPipeline(), cfg.Database(), legacyChains, db, keyStore, nil, nil) ch := evmtest.MustGetDefaultChain(t, legacyChains) orm := keeper.NewORM(db, logger.TestLogger(t), ch.Config().Database()) - registry, job := cltest.MustInsertKeeperRegistry(t, db, orm, keyStore.Eth(), 0, 1, 20) + registry, jb := cltest.MustInsertKeeperRegistry(t, db, orm, keyStore.Eth(), 0, 1, 20) lggr := logger.TestLogger(t) - executer := keeper.NewUpkeepExecuter(job, orm, jpv2.Pr, ethClient, ch.HeadBroadcaster(), ch.GasEstimator(), lggr, ch.Config().Keeper(), job.KeeperSpec.FromAddress.Address()) + executer := keeper.NewUpkeepExecuter(jb, orm, jpv2.Pr, ethClient, ch.HeadBroadcaster(), ch.GasEstimator(), lggr, ch.Config().Keeper(), jb.KeeperSpec.FromAddress.Address()) upkeep := cltest.MustInsertUpkeepForRegistry(t, db, ch.Config().Database(), registry) servicetest.Run(t, executer) - return db, cfg, ethClient, executer, registry, upkeep, job, jpv2, txm, keyStore, ch, orm + return db, cfg, ethClient, executer, registry, upkeep, jb, jpv2, txm, keyStore, ch, orm } var checkUpkeepResponse = struct { diff --git a/core/services/ocr2/plugins/generic/pipeline_runner_adapter.go b/core/services/ocr2/plugins/generic/pipeline_runner_adapter.go index b13d7b35e0b..e6a429a5f73 100644 --- a/core/services/ocr2/plugins/generic/pipeline_runner_adapter.go +++ 
b/core/services/ocr2/plugins/generic/pipeline_runner_adapter.go @@ -5,6 +5,7 @@ import ( "time" "github.com/smartcontractkit/chainlink-common/pkg/types" + "github.com/smartcontractkit/chainlink/v2/core/logger" "github.com/smartcontractkit/chainlink/v2/core/services/job" "github.com/smartcontractkit/chainlink/v2/core/services/pipeline" @@ -14,7 +15,7 @@ import ( var _ types.PipelineRunnerService = (*PipelineRunnerAdapter)(nil) type pipelineRunner interface { - ExecuteRun(ctx context.Context, spec pipeline.Spec, vars pipeline.Vars, l logger.Logger) (run *pipeline.Run, trrs pipeline.TaskRunResults, err error) + ExecuteAndInsertFinishedRun(ctx context.Context, spec pipeline.Spec, vars pipeline.Vars, l logger.Logger, saveSuccessfulTaskRuns bool) (runID int64, results pipeline.TaskRunResults, err error) } type PipelineRunnerAdapter struct { @@ -44,7 +45,7 @@ func (p *PipelineRunnerAdapter) ExecuteRun(ctx context.Context, spec string, var merge(defaultVars, vars.Vars) finalVars := pipeline.NewVarsFrom(defaultVars) - _, trrs, err := p.runner.ExecuteRun(ctx, s, finalVars, p.logger) + _, trrs, err := p.runner.ExecuteAndInsertFinishedRun(ctx, s, finalVars, p.logger, true) if err != nil { return nil, err } diff --git a/core/services/ocr2/plugins/generic/pipeline_runner_adapter_test.go b/core/services/ocr2/plugins/generic/pipeline_runner_adapter_test.go index a4bc8eb0b16..569d5b49364 100644 --- a/core/services/ocr2/plugins/generic/pipeline_runner_adapter_test.go +++ b/core/services/ocr2/plugins/generic/pipeline_runner_adapter_test.go @@ -13,7 +13,9 @@ import ( "gopkg.in/guregu/null.v4" "github.com/smartcontractkit/chainlink-common/pkg/types" + "github.com/smartcontractkit/chainlink/v2/core/bridges" + "github.com/smartcontractkit/chainlink/v2/core/internal/cltest" "github.com/smartcontractkit/chainlink/v2/core/internal/testutils" "github.com/smartcontractkit/chainlink/v2/core/internal/testutils/configtest" _ 
"github.com/smartcontractkit/chainlink/v2/core/internal/testutils/pgtest" @@ -21,8 +23,10 @@ import ( "github.com/smartcontractkit/chainlink/v2/core/services/job" "github.com/smartcontractkit/chainlink/v2/core/services/keystore" "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/plugins/generic" + ocr2validate "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/validate" "github.com/smartcontractkit/chainlink/v2/core/services/pg" "github.com/smartcontractkit/chainlink/v2/core/services/pipeline" + "github.com/smartcontractkit/chainlink/v2/core/testdata/testspecs" "github.com/smartcontractkit/chainlink/v2/core/utils" ) @@ -41,6 +45,7 @@ func TestAdapter_Integration(t *testing.T) { keystore := keystore.NewInMemory(db, utils.FastScryptParams, logger, cfg.Database()) pipelineORM := pipeline.NewORM(db, logger, cfg.Database(), cfg.JobPipeline().MaxSuccessfulRuns()) bridgesORM := bridges.NewORM(db, logger, cfg.Database()) + jobORM := job.NewORM(db, pipelineORM, bridgesORM, keystore, logger, cfg.Database()) pr := pipeline.NewRunner( pipelineORM, bridgesORM, @@ -53,7 +58,24 @@ func TestAdapter_Integration(t *testing.T) { http.DefaultClient, http.DefaultClient, ) - pra := generic.NewPipelineRunnerAdapter(logger, job.Job{}, pr) + err = keystore.Unlock(cfg.Password().Keystore()) + require.NoError(t, err) + jb, err := ocr2validate.ValidatedOracleSpecToml(testutils.Context(t), cfg.OCR2(), cfg.Insecure(), testspecs.GetOCR2EVMSpecMinimal(), nil) + require.NoError(t, err) + + const juelsPerFeeCoinSource = ` + ds [type=http method=GET url="https://chain.link/ETH-USD"]; + ds_parse [type=jsonparse path="data.price" separator="."]; + ds_multiply [type=multiply times=100]; + ds -> ds_parse -> ds_multiply;` + + _, address := cltest.MustInsertRandomKey(t, keystore.Eth()) + jb.Name = null.StringFrom("Job 1") + jb.OCR2OracleSpec.TransmitterID = null.StringFrom(address.String()) + jb.OCR2OracleSpec.PluginConfig["juelsPerFeeCoinSource"] = juelsPerFeeCoinSource + err = 
jobORM.CreateJob(&jb) + require.NoError(t, err) + pra := generic.NewPipelineRunnerAdapter(logger, jb, pr) results, err := pra.ExecuteRun(testutils.Context(t), spec, types.Vars{Vars: map[string]interface{}{"val": 1}}, types.Options{}) require.NoError(t, err) @@ -69,15 +91,15 @@ func newMockPipelineRunner() *mockPipelineRunner { type mockPipelineRunner struct { results pipeline.TaskRunResults err error - run *pipeline.Run spec pipeline.Spec vars pipeline.Vars } -func (m *mockPipelineRunner) ExecuteRun(ctx context.Context, spec pipeline.Spec, vars pipeline.Vars, l logger.Logger) (*pipeline.Run, pipeline.TaskRunResults, error) { +func (m *mockPipelineRunner) ExecuteAndInsertFinishedRun(ctx context.Context, spec pipeline.Spec, vars pipeline.Vars, l logger.Logger, saveSuccessfulTaskRuns bool) (runID int64, results pipeline.TaskRunResults, err error) { m.spec = spec m.vars = vars - return m.run, m.results, m.err + // We never attach a run to the mock, so we can't return a runID + return 0, m.results, m.err } func TestAdapter_AddsDefaultVars(t *testing.T) { diff --git a/core/services/pipeline/mocks/orm.go b/core/services/pipeline/mocks/orm.go index 759686204d4..b06041767a1 100644 --- a/core/services/pipeline/mocks/orm.go +++ b/core/services/pipeline/mocks/orm.go @@ -275,6 +275,31 @@ func (_m *ORM) InsertFinishedRun(run *pipeline.Run, saveSuccessfulTaskRuns bool, return r0 } +// InsertFinishedRunWithSpec provides a mock function with given fields: run, saveSuccessfulTaskRuns, qopts +func (_m *ORM) InsertFinishedRunWithSpec(run *pipeline.Run, saveSuccessfulTaskRuns bool, qopts ...pg.QOpt) error { + _va := make([]interface{}, len(qopts)) + for _i := range qopts { + _va[_i] = qopts[_i] + } + var _ca []interface{} + _ca = append(_ca, run, saveSuccessfulTaskRuns) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) 
+ + if len(ret) == 0 { + panic("no return value specified for InsertFinishedRunWithSpec") + } + + var r0 error + if rf, ok := ret.Get(0).(func(*pipeline.Run, bool, ...pg.QOpt) error); ok { + r0 = rf(run, saveSuccessfulTaskRuns, qopts...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // InsertFinishedRuns provides a mock function with given fields: run, saveSuccessfulTaskRuns, qopts func (_m *ORM) InsertFinishedRuns(run []*pipeline.Run, saveSuccessfulTaskRuns bool, qopts ...pg.QOpt) error { _va := make([]interface{}, len(qopts)) diff --git a/core/services/pipeline/mocks/runner.go b/core/services/pipeline/mocks/runner.go index 1de72bbf4c0..3de2703f0c7 100644 --- a/core/services/pipeline/mocks/runner.go +++ b/core/services/pipeline/mocks/runner.go @@ -39,7 +39,7 @@ func (_m *Runner) Close() error { } // ExecuteAndInsertFinishedRun provides a mock function with given fields: ctx, spec, vars, l, saveSuccessfulTaskRuns -func (_m *Runner) ExecuteAndInsertFinishedRun(ctx context.Context, spec pipeline.Spec, vars pipeline.Vars, l logger.Logger, saveSuccessfulTaskRuns bool) (int64, pipeline.FinalResult, error) { +func (_m *Runner) ExecuteAndInsertFinishedRun(ctx context.Context, spec pipeline.Spec, vars pipeline.Vars, l logger.Logger, saveSuccessfulTaskRuns bool) (int64, pipeline.TaskRunResults, error) { ret := _m.Called(ctx, spec, vars, l, saveSuccessfulTaskRuns) if len(ret) == 0 { @@ -47,9 +47,9 @@ func (_m *Runner) ExecuteAndInsertFinishedRun(ctx context.Context, spec pipeline } var r0 int64 - var r1 pipeline.FinalResult + var r1 pipeline.TaskRunResults var r2 error - if rf, ok := ret.Get(0).(func(context.Context, pipeline.Spec, pipeline.Vars, logger.Logger, bool) (int64, pipeline.FinalResult, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, pipeline.Spec, pipeline.Vars, logger.Logger, bool) (int64, pipeline.TaskRunResults, error)); ok { return rf(ctx, spec, vars, l, saveSuccessfulTaskRuns) } if rf, ok := ret.Get(0).(func(context.Context, 
pipeline.Spec, pipeline.Vars, logger.Logger, bool) int64); ok { @@ -58,10 +58,12 @@ func (_m *Runner) ExecuteAndInsertFinishedRun(ctx context.Context, spec pipeline r0 = ret.Get(0).(int64) } - if rf, ok := ret.Get(1).(func(context.Context, pipeline.Spec, pipeline.Vars, logger.Logger, bool) pipeline.FinalResult); ok { + if rf, ok := ret.Get(1).(func(context.Context, pipeline.Spec, pipeline.Vars, logger.Logger, bool) pipeline.TaskRunResults); ok { r1 = rf(ctx, spec, vars, l, saveSuccessfulTaskRuns) } else { - r1 = ret.Get(1).(pipeline.FinalResult) + if ret.Get(1) != nil { + r1 = ret.Get(1).(pipeline.TaskRunResults) + } } if rf, ok := ret.Get(2).(func(context.Context, pipeline.Spec, pipeline.Vars, logger.Logger, bool) error); ok { diff --git a/core/services/pipeline/models.go b/core/services/pipeline/models.go index e0596700e08..fa1a2605f03 100644 --- a/core/services/pipeline/models.go +++ b/core/services/pipeline/models.go @@ -15,6 +15,7 @@ import ( "gopkg.in/guregu/null.v4" "github.com/smartcontractkit/chainlink-common/pkg/utils/jsonserializable" + "github.com/smartcontractkit/chainlink/v2/core/store/models" ) @@ -46,7 +47,9 @@ func (s *Spec) ParsePipeline() (*Pipeline, error) { type Run struct { ID int64 `json:"-"` + JobID int32 `json:"-"` PipelineSpecID int32 `json:"-"` + PruningKey int32 `json:"-"` // This currently refers to the upstream job ID PipelineSpec Spec `json:"pipelineSpec"` Meta jsonserializable.JSONSerializable `json:"meta"` // The errors are only ever strings diff --git a/core/services/pipeline/orm.go b/core/services/pipeline/orm.go index 602746ffffb..c32693e4db4 100644 --- a/core/services/pipeline/orm.go +++ b/core/services/pipeline/orm.go @@ -82,6 +82,7 @@ type ORM interface { StoreRun(run *Run, qopts ...pg.QOpt) (restart bool, err error) UpdateTaskRunResult(taskID uuid.UUID, result Result) (run Run, start bool, err error) InsertFinishedRun(run *Run, saveSuccessfulTaskRuns bool, qopts ...pg.QOpt) (err error) + InsertFinishedRunWithSpec(run *Run, 
saveSuccessfulTaskRuns bool, qopts ...pg.QOpt) (err error) // InsertFinishedRuns inserts all the given runs into the database. // If saveSuccessfulTaskRuns is false, only errored runs are saved. @@ -194,11 +195,11 @@ func (o *orm) CreateRun(run *Run, qopts ...pg.QOpt) (err error) { // InsertRun inserts a run into the database func (o *orm) InsertRun(run *Run, qopts ...pg.QOpt) error { if run.Status() == RunStatusCompleted { - defer o.Prune(o.q, run.PipelineSpecID) + defer o.Prune(o.q, run.PruningKey) } q := o.q.WithOpts(qopts...) - sql := `INSERT INTO pipeline_runs (pipeline_spec_id, meta, all_errors, fatal_errors, inputs, outputs, created_at, finished_at, state) - VALUES (:pipeline_spec_id, :meta, :all_errors, :fatal_errors, :inputs, :outputs, :created_at, :finished_at, :state) + sql := `INSERT INTO pipeline_runs (pipeline_spec_id, pruning_key, meta, all_errors, fatal_errors, inputs, outputs, created_at, finished_at, state) + VALUES (:pipeline_spec_id, :pruning_key, :meta, :all_errors, :fatal_errors, :inputs, :outputs, :created_at, :finished_at, :state) RETURNING *;` return q.GetNamed(sql, run, run) } @@ -249,7 +250,7 @@ func (o *orm) StoreRun(run *Run, qopts ...pg.QOpt) (restart bool, err error) { return errors.Wrap(err, "StoreRun") } } else { - defer o.Prune(tx, run.PipelineSpecID) + defer o.Prune(tx, run.PruningKey) // Simply finish the run, no need to do any sort of locking if run.Outputs.Val == nil || len(run.FatalErrors)+len(run.AllErrors) == 0 { return errors.Errorf("run must have both Outputs and Errors, got Outputs: %#v, FatalErrors: %#v, AllErrors: %#v", run.Outputs.Val, run.FatalErrors, run.AllErrors) @@ -299,14 +300,15 @@ func (o *orm) UpdateTaskRunResult(taskID uuid.UUID, result Result) (run Run, sta } err = o.q.Transaction(func(tx pg.Queryer) error { sql := ` - SELECT pipeline_runs.*, pipeline_specs.dot_dag_source "pipeline_spec.dot_dag_source" + SELECT pipeline_runs.*, pipeline_specs.dot_dag_source "pipeline_spec.dot_dag_source", 
job_pipeline_specs.job_id "job_id" FROM pipeline_runs JOIN pipeline_task_runs ON (pipeline_task_runs.pipeline_run_id = pipeline_runs.id) JOIN pipeline_specs ON (pipeline_specs.id = pipeline_runs.pipeline_spec_id) + JOIN job_pipeline_specs ON (job_pipeline_specs.pipeline_spec_id = pipeline_specs.id) WHERE pipeline_task_runs.id = $1 AND pipeline_runs.state in ('running', 'suspended') FOR UPDATE` if err = tx.Get(&run, sql, taskID); err != nil { - return fmt.Errorf("failed to find pipeline run for ID %s: %w", taskID.String(), err) + return fmt.Errorf("failed to find pipeline run for task ID %s: %w", taskID.String(), err) } // Update the task with result @@ -337,9 +339,9 @@ func (o *orm) InsertFinishedRuns(runs []*Run, saveSuccessfulTaskRuns bool, qopts err := q.Transaction(func(tx pg.Queryer) error { pipelineRunsQuery := ` INSERT INTO pipeline_runs - (pipeline_spec_id, meta, all_errors, fatal_errors, inputs, outputs, created_at, finished_at, state) + (pipeline_spec_id, pruning_key, meta, all_errors, fatal_errors, inputs, outputs, created_at, finished_at, state) VALUES - (:pipeline_spec_id, :meta, :all_errors, :fatal_errors, :inputs, :outputs, :created_at, :finished_at, :state) + (:pipeline_spec_id, :pruning_key, :meta, :all_errors, :fatal_errors, :inputs, :outputs, :created_at, :finished_at, :state) RETURNING id ` rows, errQ := tx.NamedQuery(pipelineRunsQuery, runs) @@ -357,17 +359,17 @@ RETURNING id runIDs = append(runIDs, runID) } - pipelineSpecIDm := make(map[int32]struct{}) + pruningKeysm := make(map[int32]struct{}) for i, run := range runs { - pipelineSpecIDm[run.PipelineSpecID] = struct{}{} + pruningKeysm[run.PruningKey] = struct{}{} for j := range run.PipelineTaskRuns { run.PipelineTaskRuns[j].PipelineRunID = runIDs[i] } } defer func() { - for pipelineSpecID := range pipelineSpecIDm { - o.Prune(tx, pipelineSpecID) + for pruningKey := range pruningKeysm { + o.Prune(tx, pruningKey) } }() @@ -419,10 +421,46 @@ func (o *orm) InsertFinishedRun(run *Run, 
saveSuccessfulTaskRuns bool, qopts ... return nil } + q := o.q.WithOpts(qopts...) + err = q.Transaction(o.insertFinishedRunTx(run, saveSuccessfulTaskRuns)) + return errors.Wrap(err, "InsertFinishedRun failed") +} + +// InsertFinishedRunWithSpec works like InsertFinishedRun but also inserts the pipeline spec. +func (o *orm) InsertFinishedRunWithSpec(run *Run, saveSuccessfulTaskRuns bool, qopts ...pg.QOpt) (err error) { + if err = o.checkFinishedRun(run, saveSuccessfulTaskRuns); err != nil { + return err + } + + if o.maxSuccessfulRuns == 0 { + // optimisation: avoid persisting if we oughtn't to save any + return nil + } + q := o.q.WithOpts(qopts...) err = q.Transaction(func(tx pg.Queryer) error { - sql := `INSERT INTO pipeline_runs (pipeline_spec_id, meta, all_errors, fatal_errors, inputs, outputs, created_at, finished_at, state) - VALUES (:pipeline_spec_id, :meta, :all_errors, :fatal_errors, :inputs, :outputs, :created_at, :finished_at, :state) + sqlStmt1 := `INSERT INTO pipeline_specs (dot_dag_source, max_task_duration, created_at) + VALUES ($1, $2, NOW()) + RETURNING id;` + err = tx.Get(&run.PipelineSpecID, sqlStmt1, run.PipelineSpec.DotDagSource, run.PipelineSpec.MaxTaskDuration) + if err != nil { + return errors.Wrap(err, "failed to insert pipeline_specs") + } + // This `job_pipeline_specs` record won't be primary since when this method is called, the job already exists, so it will have primary record. 
+ sqlStmt2 := `INSERT INTO job_pipeline_specs (job_id, pipeline_spec_id, is_primary) VALUES ($1, $2, false)` + _, err = tx.Exec(sqlStmt2, run.JobID, run.PipelineSpecID) + if err != nil { + return errors.Wrap(err, "failed to insert job_pipeline_specs") + } + return o.insertFinishedRunTx(run, saveSuccessfulTaskRuns)(tx) + }) + return errors.Wrap(err, "InsertFinishedRun failed") +} + +func (o *orm) insertFinishedRunTx(run *Run, saveSuccessfulTaskRuns bool) func(tx pg.Queryer) error { + return func(tx pg.Queryer) error { + sql := `INSERT INTO pipeline_runs (pipeline_spec_id, pruning_key, meta, all_errors, fatal_errors, inputs, outputs, created_at, finished_at, state) + VALUES (:pipeline_spec_id, :pruning_key, :meta, :all_errors, :fatal_errors, :inputs, :outputs, :created_at, :finished_at, :state) RETURNING id;` query, args, e := tx.BindNamed(sql, run) @@ -430,7 +468,7 @@ func (o *orm) InsertFinishedRun(run *Run, saveSuccessfulTaskRuns bool, qopts ... return errors.Wrap(e, "failed to bind") } - if err = tx.QueryRowx(query, args...).Scan(&run.ID); err != nil { + if err := tx.QueryRowx(query, args...).Scan(&run.ID); err != nil { return errors.Wrap(err, "error inserting finished pipeline_run") } @@ -443,14 +481,13 @@ func (o *orm) InsertFinishedRun(run *Run, saveSuccessfulTaskRuns bool, qopts ... 
return nil } - defer o.Prune(tx, run.PipelineSpecID) + defer o.Prune(tx, run.PruningKey) sql = ` INSERT INTO pipeline_task_runs (pipeline_run_id, id, type, index, output, error, dot_id, created_at, finished_at) VALUES (:pipeline_run_id, :id, :type, :index, :output, :error, :dot_id, :created_at, :finished_at);` - _, err = tx.NamedExec(sql, run.PipelineTaskRuns) + _, err := tx.NamedExec(sql, run.PipelineTaskRuns) return errors.Wrap(err, "failed to insert pipeline_task_runs") - }) - return errors.Wrap(err, "InsertFinishedRun failed") + } } // DeleteRunsOlderThan deletes all pipeline_runs that have been finished for a certain threshold to free DB space @@ -586,7 +623,19 @@ func loadAssociations(q pg.Queryer, runs []*Run) error { pipelineSpecIDM[run.PipelineSpecID] = Spec{} } } - if err := q.Select(&specs, `SELECT ps.id, ps.dot_dag_source, ps.created_at, ps.max_task_duration, coalesce(jobs.id, 0) "job_id", coalesce(jobs.name, '') "job_name", coalesce(jobs.type, '') "job_type" FROM pipeline_specs ps LEFT OUTER JOIN jobs ON jobs.pipeline_spec_id=ps.id WHERE ps.id = ANY($1)`, pipelineSpecIDs); err != nil { + sqlQuery := `SELECT + ps.id, + ps.dot_dag_source, + ps.created_at, + ps.max_task_duration, + coalesce(jobs.id, 0) "job_id", + coalesce(jobs.name, '') "job_name", + coalesce(jobs.type, '') "job_type" + FROM pipeline_specs ps + LEFT JOIN job_pipeline_specs jps ON jps.pipeline_spec_id=ps.id + LEFT JOIN jobs ON jobs.id=jps.job_id + WHERE ps.id = ANY($1)` + if err := q.Select(&specs, sqlQuery, pipelineSpecIDs); err != nil { return errors.Wrap(err, "failed to postload pipeline_specs for runs") } for _, spec := range specs { @@ -617,14 +666,14 @@ func (o *orm) GetQ() pg.Q { return o.q } -func (o *orm) loadCount(pipelineSpecID int32) *atomic.Uint64 { +func (o *orm) loadCount(jobID int32) *atomic.Uint64 { // fast path; avoids allocation - actual, exists := o.pm.Load(pipelineSpecID) + actual, exists := o.pm.Load(jobID) if exists { return actual.(*atomic.Uint64) } // "slow" path 
- actual, _ = o.pm.LoadOrStore(pipelineSpecID, new(atomic.Uint64)) + actual, _ = o.pm.LoadOrStore(jobID, new(atomic.Uint64)) return actual.(*atomic.Uint64) } @@ -633,74 +682,74 @@ func (o *orm) loadCount(pipelineSpecID int32) *atomic.Uint64 { const syncLimit = 1000 // Prune attempts to keep the pipeline_runs table capped close to the -// maxSuccessfulRuns length for each pipeline_spec_id. +// maxSuccessfulRuns length for each job_id. // // It does this synchronously for small values and async/sampled for large // values. // // Note this does not guarantee the pipeline_runs table is kept to exactly the // max length, rather that it doesn't excessively larger than it. -func (o *orm) Prune(tx pg.Queryer, pipelineSpecID int32) { - if pipelineSpecID == 0 { - o.lggr.Panic("expected a non-zero pipeline spec ID") +func (o *orm) Prune(tx pg.Queryer, jobID int32) { + if jobID == 0 { + o.lggr.Panic("expected a non-zero job ID") } // For small maxSuccessfulRuns its fast enough to prune every time if o.maxSuccessfulRuns < syncLimit { - o.execPrune(tx, pipelineSpecID) + o.execPrune(tx, jobID) return } // for large maxSuccessfulRuns we do it async on a sampled basis every := o.maxSuccessfulRuns / 20 // it can get up to 5% larger than maxSuccessfulRuns before a prune - cnt := o.loadCount(pipelineSpecID) + cnt := o.loadCount(jobID) val := cnt.Add(1) if val%every == 0 { ok := o.IfStarted(func() { o.wg.Add(1) go func() { - o.lggr.Debugw("Pruning runs", "pipelineSpecID", pipelineSpecID, "count", val, "every", every, "maxSuccessfulRuns", o.maxSuccessfulRuns) + o.lggr.Debugw("Pruning runs", "jobID", jobID, "count", val, "every", every, "maxSuccessfulRuns", o.maxSuccessfulRuns) defer o.wg.Done() // Must not use tx here since it's async and the transaction // could be stale - o.execPrune(o.q.WithOpts(pg.WithLongQueryTimeout()), pipelineSpecID) + o.execPrune(o.q.WithOpts(pg.WithLongQueryTimeout()), jobID) }() }) if !ok { - o.lggr.Warnw("Cannot prune: ORM is not running", "pipelineSpecID", 
pipelineSpecID) + o.lggr.Warnw("Cannot prune: ORM is not running", "jobID", jobID) return } } } -func (o *orm) execPrune(q pg.Queryer, pipelineSpecID int32) { - res, err := q.ExecContext(o.ctx, `DELETE FROM pipeline_runs WHERE pipeline_spec_id = $1 AND state = $2 AND id NOT IN ( +func (o *orm) execPrune(q pg.Queryer, jobID int32) { + res, err := q.ExecContext(o.ctx, `DELETE FROM pipeline_runs WHERE pruning_key = $1 AND state = $2 AND id NOT IN ( SELECT id FROM pipeline_runs -WHERE pipeline_spec_id = $1 AND state = $2 +WHERE pruning_key = $1 AND state = $2 ORDER BY id DESC LIMIT $3 -)`, pipelineSpecID, RunStatusCompleted, o.maxSuccessfulRuns) +)`, jobID, RunStatusCompleted, o.maxSuccessfulRuns) if err != nil { - o.lggr.Errorw("Failed to prune runs", "err", err, "pipelineSpecID", pipelineSpecID) + o.lggr.Errorw("Failed to prune runs", "err", err, "jobID", jobID) return } rowsAffected, err := res.RowsAffected() if err != nil { - o.lggr.Errorw("Failed to get RowsAffected while pruning runs", "err", err, "pipelineSpecID", pipelineSpecID) + o.lggr.Errorw("Failed to get RowsAffected while pruning runs", "err", err, "jobID", jobID) return } if rowsAffected == 0 { // check the spec still exists and garbage collect if necessary var exists bool - if err := q.GetContext(o.ctx, &exists, `SELECT EXISTS(SELECT * FROM pipeline_specs WHERE id = $1)`, pipelineSpecID); err != nil { - o.lggr.Errorw("Failed check existence of pipeline_spec while pruning runs", "err", err, "pipelineSpecID", pipelineSpecID) + if err := q.GetContext(o.ctx, &exists, `SELECT EXISTS(SELECT ps.* FROM pipeline_specs ps JOIN job_pipeline_specs jps ON (ps.id=jps.pipeline_spec_id) WHERE jps.job_id = $1)`, jobID); err != nil { + o.lggr.Errorw("Failed check existence of pipeline_spec while pruning runs", "err", err, "jobID", jobID) return } if !exists { - o.lggr.Debugw("Pipeline spec no longer exists, removing prune count", "pipelineSpecID", pipelineSpecID) - o.pm.Delete(pipelineSpecID) + o.lggr.Debugw("Pipeline 
spec no longer exists, removing prune count", "jobID", jobID) + o.pm.Delete(jobID) } } else if o.maxSuccessfulRuns < syncLimit { - o.lggr.Tracew("Pruned runs", "rowsAffected", rowsAffected, "pipelineSpecID", pipelineSpecID) + o.lggr.Tracew("Pruned runs", "rowsAffected", rowsAffected, "jobID", jobID) } else { - o.lggr.Debugw("Pruned runs", "rowsAffected", rowsAffected, "pipelineSpecID", pipelineSpecID) + o.lggr.Debugw("Pruned runs", "rowsAffected", rowsAffected, "jobID", jobID) } } diff --git a/core/services/pipeline/orm_test.go b/core/services/pipeline/orm_test.go index 6a6efa0dc3a..c59fc0c32c6 100644 --- a/core/services/pipeline/orm_test.go +++ b/core/services/pipeline/orm_test.go @@ -14,6 +14,7 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/utils/hex" "github.com/smartcontractkit/chainlink-common/pkg/utils/jsonserializable" + "github.com/smartcontractkit/chainlink/v2/core/bridges" "github.com/smartcontractkit/chainlink/v2/core/chains/evm/utils/big" "github.com/smartcontractkit/chainlink/v2/core/internal/cltest" @@ -35,7 +36,33 @@ type ormconfig struct { func (ormconfig) JobPipelineMaxSuccessfulRuns() uint64 { return 123456 } -func setupORM(t *testing.T, heavy bool) (db *sqlx.DB, orm pipeline.ORM) { +type testOnlyORM interface { + pipeline.ORM + AddJobPipelineSpecWithoutConstraints(jobID, pipelineSpecID int32) error +} + +type testORM struct { + pipeline.ORM + db *sqlx.DB +} + +func (torm *testORM) AddJobPipelineSpecWithoutConstraints(jobID, pipelineSpecID int32) error { + _, err := torm.db.Exec(`SET CONSTRAINTS fk_job_pipeline_spec_job DEFERRED`) + if err != nil { + return err + } + _, err = torm.db.Exec(`INSERT INTO job_pipeline_specs (job_id,pipeline_spec_id, is_primary) VALUES ($1, $2, false)`, jobID, pipelineSpecID) + if err != nil { + return err + } + return nil +} + +func newTestORM(orm pipeline.ORM, db *sqlx.DB) testOnlyORM { + return &testORM{ORM: orm, db: db} +} + +func setupORM(t *testing.T, heavy bool) (db *sqlx.DB, orm pipeline.ORM, 
jorm job.ORM) { t.Helper() if heavy { @@ -45,20 +72,26 @@ func setupORM(t *testing.T, heavy bool) (db *sqlx.DB, orm pipeline.ORM) { } cfg := ormconfig{pgtest.NewQConfig(true)} orm = pipeline.NewORM(db, logger.TestLogger(t), cfg, cfg.JobPipelineMaxSuccessfulRuns()) + config := configtest.NewTestGeneralConfig(t) + lggr := logger.TestLogger(t) + keyStore := cltest.NewKeyStore(t, db, config.Database()) + bridgeORM := bridges.NewORM(db, lggr, config.Database()) + + jorm = job.NewORM(db, orm, bridgeORM, keyStore, lggr, config.Database()) return } -func setupHeavyORM(t *testing.T) (db *sqlx.DB, orm pipeline.ORM) { +func setupHeavyORM(t *testing.T) (db *sqlx.DB, orm pipeline.ORM, jorm job.ORM) { return setupORM(t, true) } -func setupLiteORM(t *testing.T) (db *sqlx.DB, orm pipeline.ORM) { +func setupLiteORM(t *testing.T) (db *sqlx.DB, orm pipeline.ORM, jorm job.ORM) { return setupORM(t, false) } func Test_PipelineORM_CreateSpec(t *testing.T) { - db, orm := setupLiteORM(t) + db, orm, _ := setupLiteORM(t) var ( source = "" @@ -80,9 +113,11 @@ func Test_PipelineORM_CreateSpec(t *testing.T) { } func Test_PipelineORM_FindRun(t *testing.T) { - db, orm := setupLiteORM(t) + db, orm, _ := setupLiteORM(t) - _, err := db.Exec(`SET CONSTRAINTS pipeline_runs_pipeline_spec_id_fkey DEFERRED`) + _, err := db.Exec(`SET CONSTRAINTS fk_pipeline_runs_pruning_key DEFERRED`) + require.NoError(t, err) + _, err = db.Exec(`SET CONSTRAINTS pipeline_runs_pipeline_spec_id_fkey DEFERRED`) require.NoError(t, err) expected := mustInsertPipelineRun(t, orm) @@ -107,7 +142,7 @@ func mustInsertPipelineRun(t *testing.T, orm pipeline.ORM) pipeline.Run { return run } -func mustInsertAsyncRun(t *testing.T, orm pipeline.ORM) *pipeline.Run { +func mustInsertAsyncRun(t *testing.T, orm pipeline.ORM, jobORM job.ORM) *pipeline.Run { t.Helper() s := ` @@ -120,17 +155,24 @@ ds1->ds1_parse->ds1_multiply->answer1; answer1 [type=median index=0]; answer2 [type=bridge name=election_winner index=1]; ` - - p, err := 
pipeline.Parse(s) - require.NoError(t, err) - require.NotNil(t, p) - - maxTaskDuration := models.Interval(1 * time.Minute) - specID, err := orm.CreateSpec(*p, maxTaskDuration) + jb := job.Job{ + Type: job.DirectRequest, + SchemaVersion: 1, + MaxTaskDuration: models.Interval(1 * time.Minute), + DirectRequestSpec: &job.DirectRequestSpec{ + ContractAddress: cltest.NewEIP55Address(), + EVMChainID: (*big.Big)(&cltest.FixtureChainID), + }, + PipelineSpec: &pipeline.Spec{ + DotDagSource: s, + }, + } + err := jobORM.CreateJob(&jb) require.NoError(t, err) run := &pipeline.Run{ - PipelineSpecID: specID, + PipelineSpecID: jb.PipelineSpecID, + PruningKey: jb.ID, State: pipeline.RunStatusRunning, Outputs: jsonserializable.JSONSerializable{}, CreatedAt: time.Now(), @@ -142,9 +184,11 @@ answer2 [type=bridge name=election_winner index=1]; } func TestInsertFinishedRuns(t *testing.T) { - db, orm := setupLiteORM(t) + db, orm, _ := setupLiteORM(t) - _, err := db.Exec(`SET CONSTRAINTS pipeline_runs_pipeline_spec_id_fkey DEFERRED`) + _, err := db.Exec(`SET CONSTRAINTS fk_pipeline_runs_pruning_key DEFERRED`) + require.NoError(t, err) + _, err = db.Exec(`SET CONSTRAINTS pipeline_runs_pipeline_spec_id_fkey DEFERRED`) require.NoError(t, err) ps := cltest.MustInsertPipelineSpec(t, db) @@ -154,6 +198,7 @@ func TestInsertFinishedRuns(t *testing.T) { now := time.Now() r := pipeline.Run{ PipelineSpecID: ps.ID, + PruningKey: ps.ID, // using the spec ID as the pruning key for test purposes, this is supposed to be the job ID State: pipeline.RunStatusRunning, AllErrors: pipeline.RunErrors{}, FatalErrors: pipeline.RunErrors{}, @@ -198,11 +243,96 @@ func TestInsertFinishedRuns(t *testing.T) { } +func Test_PipelineORM_InsertFinishedRunWithSpec(t *testing.T) { + db, orm, jorm := setupLiteORM(t) + + s := ` +ds1 [type=bridge async=true name="example-bridge" timeout=0 requestData=<{"data": {"coin": "BTC", "market": "USD"}}>] +ds1_parse [type=jsonparse lax=false path="data,result"] +ds1_multiply 
[type=multiply times=1000000000000000000] + +ds1->ds1_parse->ds1_multiply->answer1; + +answer1 [type=median index=0]; +answer2 [type=bridge name=election_winner index=1]; +` + jb := job.Job{ + Type: job.DirectRequest, + SchemaVersion: 1, + MaxTaskDuration: models.Interval(1 * time.Minute), + DirectRequestSpec: &job.DirectRequestSpec{ + ContractAddress: cltest.NewEIP55Address(), + EVMChainID: (*big.Big)(&cltest.FixtureChainID), + }, + PipelineSpec: &pipeline.Spec{ + DotDagSource: s, + }, + } + err := jorm.CreateJob(&jb) + require.NoError(t, err) + spec := pipeline.Spec{ + DotDagSource: s, + CreatedAt: time.Now(), + MaxTaskDuration: models.Interval(1 * time.Minute), + JobID: jb.ID, + JobName: jb.Name.ValueOrZero(), + JobType: string(jb.Type), + } + defaultVars := map[string]interface{}{ + "jb": map[string]interface{}{ + "databaseID": jb.ID, + "externalJobID": jb.ExternalJobID, + "name": jb.Name.ValueOrZero(), + }, + } + now := time.Now() + run := pipeline.NewRun(spec, pipeline.NewVarsFrom(defaultVars)) + run.PipelineTaskRuns = []pipeline.TaskRun{ + { + ID: uuid.New(), + PipelineRunID: run.ID, + Type: "bridge", + DotID: "ds1", + CreatedAt: now, + FinishedAt: null.TimeFrom(now.Add(100 * time.Millisecond)), + }, + { + ID: uuid.New(), + PipelineRunID: run.ID, + Type: "median", + DotID: "answer2", + Output: jsonserializable.JSONSerializable{Val: 1, Valid: true}, + CreatedAt: now, + FinishedAt: null.TimeFrom(now.Add(200 * time.Millisecond)), + }, + } + run.FinishedAt = null.TimeFrom(now.Add(300 * time.Millisecond)) + run.Outputs = jsonserializable.JSONSerializable{ + Val: "stuff", + Valid: true, + } + run.AllErrors = append(run.AllErrors, null.NewString("", false)) + run.State = pipeline.RunStatusCompleted + + err = orm.InsertFinishedRunWithSpec(run, true) + require.NoError(t, err) + + var pipelineSpec pipeline.Spec + err = db.Get(&pipelineSpec, "SELECT pipeline_specs.* FROM pipeline_specs JOIN job_pipeline_specs ON (pipeline_specs.id = job_pipeline_specs.pipeline_spec_id) 
WHERE job_pipeline_specs.job_id = $1 AND pipeline_specs.id = $2", jb.ID, run.PipelineSpecID) + require.NoError(t, err) + var jobPipelineSpec job.PipelineSpec + err = db.Get(&jobPipelineSpec, "SELECT * FROM job_pipeline_specs WHERE job_id = $1 AND pipeline_spec_id = $2", jb.ID, pipelineSpec.ID) + require.NoError(t, err) + + assert.Equal(t, run.PipelineSpecID, pipelineSpec.ID) + assert.False(t, jobPipelineSpec.IsPrimary) +} + // Tests that inserting run results, then later updating the run results via upsert will work correctly. func Test_PipelineORM_StoreRun_ShouldUpsert(t *testing.T) { - _, orm := setupLiteORM(t) + _, orm, jorm := setupLiteORM(t) - run := mustInsertAsyncRun(t, orm) + run := mustInsertAsyncRun(t, orm, jorm) now := time.Now() @@ -279,9 +409,9 @@ func Test_PipelineORM_StoreRun_ShouldUpsert(t *testing.T) { // Tests that trying to persist a partial run while new data became available (i.e. via /v2/restart) // will detect a restart and update the result data on the Run. func Test_PipelineORM_StoreRun_DetectsRestarts(t *testing.T) { - db, orm := setupLiteORM(t) + db, orm, jorm := setupLiteORM(t) - run := mustInsertAsyncRun(t, orm) + run := mustInsertAsyncRun(t, orm, jorm) r, err := orm.FindRun(run.ID) require.NoError(t, err) @@ -344,9 +474,9 @@ func Test_PipelineORM_StoreRun_DetectsRestarts(t *testing.T) { } func Test_PipelineORM_StoreRun_UpdateTaskRunResult(t *testing.T) { - _, orm := setupLiteORM(t) + _, orm, jorm := setupLiteORM(t) - run := mustInsertAsyncRun(t, orm) + run := mustInsertAsyncRun(t, orm, jorm) ds1_id := uuid.New() now := time.Now() @@ -425,9 +555,9 @@ func Test_PipelineORM_StoreRun_UpdateTaskRunResult(t *testing.T) { } func Test_PipelineORM_DeleteRun(t *testing.T) { - _, orm := setupLiteORM(t) + _, orm, jorm := setupLiteORM(t) - run := mustInsertAsyncRun(t, orm) + run := mustInsertAsyncRun(t, orm, jorm) now := time.Now() @@ -467,12 +597,12 @@ func Test_PipelineORM_DeleteRun(t *testing.T) { } func Test_PipelineORM_DeleteRunsOlderThan(t 
*testing.T) { - _, orm := setupHeavyORM(t) + _, orm, jorm := setupHeavyORM(t) var runsIds []int64 for i := 1; i <= 2000; i++ { - run := mustInsertAsyncRun(t, orm) + run := mustInsertAsyncRun(t, orm, jorm) now := time.Now() @@ -556,6 +686,7 @@ func Test_GetUnfinishedRuns_Keepers(t *testing.T) { err = porm.CreateRun(&pipeline.Run{ PipelineSpecID: keeperJob.PipelineSpecID, + PruningKey: keeperJob.ID, State: pipeline.RunStatusRunning, Outputs: jsonserializable.JSONSerializable{}, CreatedAt: time.Now(), @@ -572,6 +703,7 @@ func Test_GetUnfinishedRuns_Keepers(t *testing.T) { err = porm.CreateRun(&pipeline.Run{ PipelineSpecID: keeperJob.PipelineSpecID, + PruningKey: keeperJob.ID, State: pipeline.RunStatusRunning, Outputs: jsonserializable.JSONSerializable{}, CreatedAt: time.Now(), @@ -654,6 +786,7 @@ func Test_GetUnfinishedRuns_DirectRequest(t *testing.T) { err = porm.CreateRun(&pipeline.Run{ PipelineSpecID: drJob.PipelineSpecID, + PruningKey: drJob.ID, State: pipeline.RunStatusRunning, Outputs: jsonserializable.JSONSerializable{}, CreatedAt: time.Now(), @@ -670,6 +803,7 @@ func Test_GetUnfinishedRuns_DirectRequest(t *testing.T) { err = porm.CreateRun(&pipeline.Run{ PipelineSpecID: drJob.PipelineSpecID, + PruningKey: drJob.ID, State: pipeline.RunStatusSuspended, Outputs: jsonserializable.JSONSerializable{}, CreatedAt: time.Now(), @@ -713,43 +847,54 @@ func Test_Prune(t *testing.T) { lggr, observed := logger.TestLoggerObserved(t, zapcore.DebugLevel) db := pgtest.NewSqlxDB(t) porm := pipeline.NewORM(db, lggr, cfg.Database(), cfg.JobPipeline().MaxSuccessfulRuns()) + torm := newTestORM(porm, db) ps1 := cltest.MustInsertPipelineSpec(t, db) + // We need a job_pipeline_specs entry to test the pruning mechanism + err := torm.AddJobPipelineSpecWithoutConstraints(ps1.ID, ps1.ID) + require.NoError(t, err) + + jobID := ps1.ID + t.Run("when there are no runs to prune, does nothing", func(t *testing.T) { - porm.Prune(db, ps1.ID) + porm.Prune(db, jobID) // no error logs; it did nothing 
assert.Empty(t, observed.All()) }) + _, err = db.Exec(`SET CONSTRAINTS fk_pipeline_runs_pruning_key DEFERRED`) + require.NoError(t, err) + // ps1 has: // - 20 completed runs for i := 0; i < 20; i++ { - cltest.MustInsertPipelineRunWithStatus(t, db, ps1.ID, pipeline.RunStatusCompleted) + cltest.MustInsertPipelineRunWithStatus(t, db, ps1.ID, pipeline.RunStatusCompleted, jobID) } ps2 := cltest.MustInsertPipelineSpec(t, db) + jobID2 := ps2.ID // ps2 has: // - 12 completed runs // - 3 errored runs - // - 3 running run + // - 3 running runs // - 3 suspended run for i := 0; i < 12; i++ { - cltest.MustInsertPipelineRunWithStatus(t, db, ps2.ID, pipeline.RunStatusCompleted) + cltest.MustInsertPipelineRunWithStatus(t, db, ps2.ID, pipeline.RunStatusCompleted, jobID2) } for i := 0; i < 3; i++ { - cltest.MustInsertPipelineRunWithStatus(t, db, ps2.ID, pipeline.RunStatusErrored) + cltest.MustInsertPipelineRunWithStatus(t, db, ps2.ID, pipeline.RunStatusErrored, jobID2) } for i := 0; i < 3; i++ { - cltest.MustInsertPipelineRunWithStatus(t, db, ps2.ID, pipeline.RunStatusRunning) + cltest.MustInsertPipelineRunWithStatus(t, db, ps2.ID, pipeline.RunStatusRunning, jobID2) } for i := 0; i < 3; i++ { - cltest.MustInsertPipelineRunWithStatus(t, db, ps2.ID, pipeline.RunStatusSuspended) + cltest.MustInsertPipelineRunWithStatus(t, db, ps2.ID, pipeline.RunStatusSuspended, jobID2) } - porm.Prune(db, ps2.ID) + porm.Prune(db, jobID2) cnt := pgtest.MustCount(t, db, "SELECT count(*) FROM pipeline_runs WHERE pipeline_spec_id = $1 AND state = $2", ps1.ID, pipeline.RunStatusCompleted) assert.Equal(t, cnt, 20) diff --git a/core/services/pipeline/runner.go b/core/services/pipeline/runner.go index 3b89a1d4945..08d371716fc 100644 --- a/core/services/pipeline/runner.go +++ b/core/services/pipeline/runner.go @@ -17,10 +17,10 @@ import ( "github.com/smartcontractkit/chainlink-common/pkg/services" commonutils "github.com/smartcontractkit/chainlink-common/pkg/utils" 
"github.com/smartcontractkit/chainlink-common/pkg/utils/jsonserializable" - "github.com/smartcontractkit/chainlink/v2/core/config/env" "github.com/smartcontractkit/chainlink/v2/core/bridges" "github.com/smartcontractkit/chainlink/v2/core/chains/legacyevm" + "github.com/smartcontractkit/chainlink/v2/core/config/env" "github.com/smartcontractkit/chainlink/v2/core/logger" "github.com/smartcontractkit/chainlink/v2/core/recovery" "github.com/smartcontractkit/chainlink/v2/core/services/pg" @@ -49,7 +49,8 @@ type Runner interface { // ExecuteAndInsertFinishedRun executes a new run in-memory according to a spec, persists and saves the results. // It is a combination of ExecuteRun and InsertFinishedRun. // Note that the spec MUST have a DOT graph for this to work. - ExecuteAndInsertFinishedRun(ctx context.Context, spec Spec, vars Vars, l logger.Logger, saveSuccessfulTaskRuns bool) (runID int64, finalResult FinalResult, err error) + // This will persist the Spec in the DB if it doesn't have an ID. 
+ ExecuteAndInsertFinishedRun(ctx context.Context, spec Spec, vars Vars, l logger.Logger, saveSuccessfulTaskRuns bool) (runID int64, results TaskRunResults, err error) OnRunFinished(func(*Run)) InitializePipeline(spec Spec) (*Pipeline, error) @@ -203,6 +204,8 @@ func (err ErrRunPanicked) Error() string { func NewRun(spec Spec, vars Vars) *Run { return &Run{ State: RunStatusRunning, + JobID: spec.JobID, + PruningKey: spec.JobID, PipelineSpec: spec, PipelineSpecID: spec.ID, Inputs: jsonserializable.JSONSerializable{Val: vars.vars, Valid: true}, @@ -551,23 +554,26 @@ func logTaskRunToPrometheus(trr TaskRunResult, spec Spec) { } // ExecuteAndInsertFinishedRun executes a run in memory then inserts the finished run/task run records, returning the final result -func (r *runner) ExecuteAndInsertFinishedRun(ctx context.Context, spec Spec, vars Vars, l logger.Logger, saveSuccessfulTaskRuns bool) (runID int64, finalResult FinalResult, err error) { +func (r *runner) ExecuteAndInsertFinishedRun(ctx context.Context, spec Spec, vars Vars, l logger.Logger, saveSuccessfulTaskRuns bool) (runID int64, results TaskRunResults, err error) { run, trrs, err := r.ExecuteRun(ctx, spec, vars, l) if err != nil { - return 0, finalResult, pkgerrors.Wrapf(err, "error executing run for spec ID %v", spec.ID) + return 0, trrs, pkgerrors.Wrapf(err, "error executing run for spec ID %v", spec.ID) } - finalResult = trrs.FinalResult(l) - // don't insert if we exited early if run.FailSilently { - return 0, finalResult, nil + return 0, trrs, nil } - if err = r.orm.InsertFinishedRun(run, saveSuccessfulTaskRuns); err != nil { - return 0, finalResult, pkgerrors.Wrapf(err, "error inserting finished results for spec ID %v", spec.ID) + if spec.ID == 0 { + err = r.orm.InsertFinishedRunWithSpec(run, saveSuccessfulTaskRuns) + } else { + err = r.orm.InsertFinishedRun(run, saveSuccessfulTaskRuns) + } + if err != nil { + return 0, trrs, pkgerrors.Wrapf(err, "error inserting finished results for spec ID %v", 
run.PipelineSpecID) } - return run.ID, finalResult, nil + return run.ID, trrs, nil } diff --git a/core/services/pipeline/runner_test.go b/core/services/pipeline/runner_test.go index 7a417ef9d94..cdf63aa975f 100644 --- a/core/services/pipeline/runner_test.go +++ b/core/services/pipeline/runner_test.go @@ -486,7 +486,49 @@ func Test_PipelineRunner_HandleFaultsPersistRun(t *testing.T) { lggr := logger.TestLogger(t) r := pipeline.NewRunner(orm, btORM, cfg.JobPipeline(), cfg.WebServer(), legacyChains, ethKeyStore, nil, lggr, nil, nil) - spec := pipeline.Spec{DotDagSource: ` + spec := pipeline.Spec{ + ID: 1, + DotDagSource: ` +fail_but_i_dont_care [type=fail] +succeed1 [type=memo value=10] +succeed2 [type=memo value=11] +final [type=mean] + +fail_but_i_dont_care -> final; +succeed1 -> final; +succeed2 -> final; +`} + vars := pipeline.NewVarsFrom(nil) + + _, taskResults, err := r.ExecuteAndInsertFinishedRun(testutils.Context(t), spec, vars, lggr, false) + finalResult := taskResults.FinalResult(lggr) + require.NoError(t, err) + assert.True(t, finalResult.HasErrors()) + assert.False(t, finalResult.HasFatalErrors()) + require.Len(t, finalResult.Values, 1) + assert.Equal(t, "10.5", finalResult.Values[0].(decimal.Decimal).String()) +} + +func Test_PipelineRunner_ExecuteAndInsertFinishedRun_SavingTheSpec(t *testing.T) { + db := pgtest.NewSqlxDB(t) + orm := mocks.NewORM(t) + btORM := bridgesMocks.NewORM(t) + q := pg.NewQ(db, logger.TestLogger(t), configtest.NewTestGeneralConfig(t).Database()) + orm.On("GetQ").Return(q).Maybe() + orm.On("InsertFinishedRunWithSpec", mock.Anything, mock.Anything, mock.Anything). + Run(func(args mock.Arguments) { + args.Get(0).(*pipeline.Run).ID = 1 + }). 
+ Return(nil) + cfg := configtest.NewTestGeneralConfig(t) + ethKeyStore := cltest.NewKeyStore(t, db, cfg.Database()).Eth() + relayExtenders := evmtest.NewChainRelayExtenders(t, evmtest.TestChainOpts{DB: db, GeneralConfig: cfg, KeyStore: ethKeyStore}) + legacyChains := evmrelay.NewLegacyChainsFromRelayerExtenders(relayExtenders) + lggr := logger.TestLogger(t) + r := pipeline.NewRunner(orm, btORM, cfg.JobPipeline(), cfg.WebServer(), legacyChains, ethKeyStore, nil, lggr, nil, nil) + + spec := pipeline.Spec{ + DotDagSource: ` fail_but_i_dont_care [type=fail] succeed1 [type=memo value=10] succeed2 [type=memo value=11] @@ -498,7 +540,8 @@ succeed2 -> final; `} vars := pipeline.NewVarsFrom(nil) - _, finalResult, err := r.ExecuteAndInsertFinishedRun(testutils.Context(t), spec, vars, lggr, false) + _, taskResults, err := r.ExecuteAndInsertFinishedRun(testutils.Context(t), spec, vars, lggr, false) + finalResult := taskResults.FinalResult(lggr) require.NoError(t, err) assert.True(t, finalResult.HasErrors()) assert.False(t, finalResult.HasFatalErrors()) diff --git a/core/store/migrate/migrations/0231_dynamic_pipeline_runs.sql b/core/store/migrate/migrations/0231_dynamic_pipeline_runs.sql new file mode 100644 index 00000000000..2e51af8f922 --- /dev/null +++ b/core/store/migrate/migrations/0231_dynamic_pipeline_runs.sql @@ -0,0 +1,48 @@ +-- +goose Up +-- +goose StatementBegin +CREATE TABLE job_pipeline_specs ( + job_id INT NOT NULL, + pipeline_spec_id INT NOT NULL, + is_primary BOOLEAN NOT NULL DEFAULT FALSE, + CONSTRAINT pk_job_pipeline_spec PRIMARY KEY (job_id, pipeline_spec_id), + CONSTRAINT fk_job_pipeline_spec_job FOREIGN KEY (job_id) REFERENCES jobs(id) ON DELETE CASCADE DEFERRABLE, + CONSTRAINT fk_job_pipeline_spec_pipeline_spec FOREIGN KEY (pipeline_spec_id) REFERENCES pipeline_specs(id) ON DELETE CASCADE DEFERRABLE +); + +CREATE UNIQUE INDEX idx_unique_job_pipeline_spec_primary_per_job ON job_pipeline_specs(job_id) WHERE is_primary; + +-- The moment this runs, we only 
have one job+pipeline_spec combination per job, complying with the unique index. +INSERT INTO job_pipeline_specs (job_id, pipeline_spec_id, is_primary) +SELECT id, pipeline_spec_id, TRUE +FROM jobs; + +ALTER TABLE jobs DROP COLUMN pipeline_spec_id; + +ALTER TABLE pipeline_runs ADD COLUMN pruning_key INT; + +UPDATE pipeline_runs +SET pruning_key = pjps.job_id +FROM job_pipeline_specs pjps +WHERE pjps.pipeline_spec_id = pipeline_runs.pipeline_spec_id; + +ALTER TABLE pipeline_runs ALTER COLUMN pruning_key SET NOT NULL; + +ALTER TABLE pipeline_runs ADD CONSTRAINT fk_pipeline_runs_pruning_key FOREIGN KEY (pruning_key) REFERENCES jobs(id) ON DELETE CASCADE DEFERRABLE; +-- +goose StatementEnd + +-- +goose Down +-- +goose StatementBegin +ALTER TABLE jobs ADD COLUMN pipeline_spec_id INT; + +UPDATE jobs +SET pipeline_spec_id = jps.pipeline_spec_id +FROM job_pipeline_specs jps +WHERE jps.job_id = jobs.id + AND jps.is_primary = TRUE; + +ALTER TABLE pipeline_runs DROP COLUMN pruning_key; + +DROP INDEX IF EXISTS idx_unique_primary_per_job; + +DROP TABLE IF EXISTS job_pipeline_specs; +-- +goose StatementEnd \ No newline at end of file From 1fd2c91c7e145bce9ab32a1c1003ece3e42eaa6a Mon Sep 17 00:00:00 2001 From: frank zhu Date: Tue, 9 Apr 2024 09:31:36 -0700 Subject: [PATCH 08/10] update changelog path (#12757) --- .gitignore | 3 --- docs/CHANGELOG.md => CHANGELOG.md | 4 ---- README.md | 4 ++++ 3 files changed, 4 insertions(+), 7 deletions(-) rename docs/CHANGELOG.md => CHANGELOG.md (99%) diff --git a/.gitignore b/.gitignore index 1091b453326..ccf8a006e7b 100644 --- a/.gitignore +++ b/.gitignore @@ -97,6 +97,3 @@ override*.toml # Pythin venv .venv/ - -# Temp Changelog migration -CHANGELOG.md diff --git a/docs/CHANGELOG.md b/CHANGELOG.md similarity index 99% rename from docs/CHANGELOG.md rename to CHANGELOG.md index b114bfb89f1..b5566c64e58 100644 --- a/docs/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,9 +1,5 @@ # Changelog Chainlink Core -All notable changes to this project will be 
documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). diff --git a/README.md b/README.md index 882f408050a..0d336dd22b3 100644 --- a/README.md +++ b/README.md @@ -304,6 +304,10 @@ To install `changesets`: Either after or before you create a commit, run the `pnpm changeset` command to create an accompanying changeset entry which will reflect on the CHANGELOG for the next release. +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), + +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + ### Tips For more tips on how to build and test Chainlink, see our [development tips page](https://github.com/smartcontractkit/chainlink/wiki/Development-Tips). From e364955e22c6ad26b07ff5f92f7def7629a86f6b Mon Sep 17 00:00:00 2001 From: Cedric Date: Tue, 9 Apr 2024 17:35:06 +0100 Subject: [PATCH 09/10] [KS-136] Update staging (#12703) * Update to latest of chainlink common; this fixes a panic due to incorrect list element handling in the values library, and moves feedIds to bytes rather than uints. * Modify hardcoded workflow with the correct feedIds. * Move all initialization logic of capabilities to `ServicesForSpec` so that we can merge the mercury loop into develop. * Deterministically generate execution IDs, and move the hardcoded workflow to the delegate. 
--- core/scripts/go.mod | 2 +- core/scripts/go.sum | 4 +- core/services/relay/evm/cap_encoder.go | 42 ++++++--- core/services/relay/evm/cap_encoder_test.go | 51 +++++++++-- core/services/workflows/delegate.go | 88 ++++++++++++++++--- core/services/workflows/engine.go | 64 ++++++++++---- core/services/workflows/engine_test.go | 6 +- core/services/workflows/models.go | 1 + .../workflows/marshalling/workflow_1.yaml | 2 +- go.mod | 2 +- go.sum | 4 +- integration-tests/go.mod | 2 +- integration-tests/go.sum | 4 +- integration-tests/load/go.mod | 2 +- integration-tests/load/go.sum | 4 +- 15 files changed, 212 insertions(+), 66 deletions(-) diff --git a/core/scripts/go.mod b/core/scripts/go.mod index c4e32d4f276..2176664f561 100644 --- a/core/scripts/go.mod +++ b/core/scripts/go.mod @@ -21,7 +21,7 @@ require ( github.com/prometheus/client_golang v1.17.0 github.com/shopspring/decimal v1.3.1 github.com/smartcontractkit/chainlink-automation v1.0.2 - github.com/smartcontractkit/chainlink-common v0.1.7-0.20240404141006-77085a02ce25 + github.com/smartcontractkit/chainlink-common v0.1.7-0.20240405173118-f5bf144ec6b3 github.com/smartcontractkit/chainlink-vrf v0.0.0-20231120191722-fef03814f868 github.com/smartcontractkit/chainlink/v2 v2.0.0-00010101000000-000000000000 github.com/smartcontractkit/libocr v0.0.0-20240326191951-2bbe9382d052 diff --git a/core/scripts/go.sum b/core/scripts/go.sum index 98b5142ba0a..5d74d23da68 100644 --- a/core/scripts/go.sum +++ b/core/scripts/go.sum @@ -1187,8 +1187,8 @@ github.com/smartcontractkit/chain-selectors v1.0.10 h1:t9kJeE6B6G+hKD0GYR4kGJSCq github.com/smartcontractkit/chain-selectors v1.0.10/go.mod h1:d4Hi+E1zqjy9HqMkjBE5q1vcG9VGgxf5VxiRHfzi2kE= github.com/smartcontractkit/chainlink-automation v1.0.2 h1:xsfyuswL15q2YBGQT3qn2SBz6fnSKiSW7XZ8IZQLpnI= github.com/smartcontractkit/chainlink-automation v1.0.2/go.mod h1:RjboV0Qd7YP+To+OrzHGXaxUxoSONveCoAK2TQ1INLU= -github.com/smartcontractkit/chainlink-common v0.1.7-0.20240404141006-77085a02ce25 
h1:fY2wMtlr/VQxPyVVQdi1jFvQHi0VbDnGGVXzLKOZTOY= -github.com/smartcontractkit/chainlink-common v0.1.7-0.20240404141006-77085a02ce25/go.mod h1:kstYjAGqBswdZpl7YkSPeXBDVwaY1VaR6tUMPWl8ykA= +github.com/smartcontractkit/chainlink-common v0.1.7-0.20240405173118-f5bf144ec6b3 h1:LCVHf/ooB4HDkgfLUq+jK4CuCr6SsdNCQZt3/etJ8ms= +github.com/smartcontractkit/chainlink-common v0.1.7-0.20240405173118-f5bf144ec6b3/go.mod h1:kstYjAGqBswdZpl7YkSPeXBDVwaY1VaR6tUMPWl8ykA= github.com/smartcontractkit/chainlink-cosmos v0.4.1-0.20240213120401-01a23955f9f8 h1:I326nw5GwHQHsLKHwtu5Sb9EBLylC8CfUd7BFAS0jtg= github.com/smartcontractkit/chainlink-cosmos v0.4.1-0.20240213120401-01a23955f9f8/go.mod h1:a65NtrK4xZb01mf0dDNghPkN2wXgcqFQ55ADthVBgMc= github.com/smartcontractkit/chainlink-data-streams v0.0.0-20240220203239-09be0ea34540 h1:xFSv8561jsLtF6gYZr/zW2z5qUUAkcFkApin2mnbYTo= diff --git a/core/services/relay/evm/cap_encoder.go b/core/services/relay/evm/cap_encoder.go index b6865096af9..1c7814ab16a 100644 --- a/core/services/relay/evm/cap_encoder.go +++ b/core/services/relay/evm/cap_encoder.go @@ -2,6 +2,7 @@ package evm import ( "context" + "encoding/hex" "encoding/json" "fmt" @@ -75,23 +76,36 @@ func (c *capEncoder) Encode(ctx context.Context, input values.Map) ([]byte, erro return append(append(workflowIDbytes, executionIDBytes...), userPayload...), nil } +func decodeID(input map[string]any, key string) ([]byte, error) { + id, ok := input[key].(string) + if !ok { + return nil, fmt.Errorf("expected %s to be a string", key) + } + + b, err := hex.DecodeString(id) + if err != nil { + return nil, err + } + + if len(b) < 32 { + return nil, fmt.Errorf("incorrect length for id %s, expected 32 bytes, got %d", id, len(b)) + } + + return b, nil +} + // extract workflowID and executionID from the input map, validate and align to 32 bytes // NOTE: consider requiring them to be exactly 32 bytes to avoid issues with padding func extractIDs(input map[string]any) ([]byte, []byte, error) { - workflowID, ok := 
input[consensustypes.WorkflowIDFieldName].(string) - if !ok { - return nil, nil, fmt.Errorf("expected %s to be a string", consensustypes.WorkflowIDFieldName) - } - executionID, ok := input[consensustypes.ExecutionIDFieldName].(string) - if !ok { - return nil, nil, fmt.Errorf("expected %s to be a string", consensustypes.ExecutionIDFieldName) + workflowID, err := decodeID(input, consensustypes.WorkflowIDFieldName) + if err != nil { + return nil, nil, err } - if len(workflowID) > 32 || len(executionID) > 32 { - return nil, nil, fmt.Errorf("IDs too long: %d, %d", len(workflowID), len(executionID)) + + executionID, err := decodeID(input, consensustypes.ExecutionIDFieldName) + if err != nil { + return nil, nil, err } - alignedWorkflowID := make([]byte, idLen) - copy(alignedWorkflowID, workflowID) - alignedExecutionID := make([]byte, idLen) - copy(alignedExecutionID, executionID) - return alignedWorkflowID, alignedExecutionID, nil + + return workflowID, executionID, nil } diff --git a/core/services/relay/evm/cap_encoder_test.go b/core/services/relay/evm/cap_encoder_test.go index 1d8b6da4610..186968df9b2 100644 --- a/core/services/relay/evm/cap_encoder_test.go +++ b/core/services/relay/evm/cap_encoder_test.go @@ -4,6 +4,7 @@ import ( "encoding/hex" "testing" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" consensustypes "github.com/smartcontractkit/chainlink-common/pkg/capabilities/consensus/ocr3/types" @@ -13,10 +14,15 @@ import ( ) var ( - reportA = []byte{0x01, 0x02, 0x03} - reportB = []byte{0xaa, 0xbb, 0xcc, 0xdd} - workflowID = "my_id" - executionID = "my_execution_id" + reportA = []byte{0x01, 0x02, 0x03} + reportB = []byte{0xaa, 0xbb, 0xcc, 0xdd} + + // hex encoded 32 byte strings + workflowID = "15c631d295ef5e32deb99a10ee6804bc4af1385568f9b3363f6552ac6dbb2cef" + executionID = "8d4e66421db647dd916d3ec28d56188c8d7dae5f808e03d03339ed2562f13bb0" + + invalidID = "not_valid" + wrongLength = "8d4e66" ) func TestEVMEncoder(t *testing.T) { @@ 
-41,8 +47,8 @@ func TestEVMEncoder(t *testing.T) { expected := // start of the outer tuple ((user_fields), workflow_id, workflow_execution_id) - "6d795f6964000000000000000000000000000000000000000000000000000000" + // workflow ID - "6d795f657865637574696f6e5f69640000000000000000000000000000000000" + // execution ID + workflowID + + executionID + // start of the inner tuple (user_fields) "0000000000000000000000000000000000000000000000000000000000000020" + // offset of mercury_reports array "0000000000000000000000000000000000000000000000000000000000000002" + // length of mercury_reports array @@ -56,3 +62,36 @@ func TestEVMEncoder(t *testing.T) { require.Equal(t, expected, hex.EncodeToString(encoded)) } + +func TestEVMEncoder_InvalidIDs(t *testing.T) { + config := map[string]any{ + "abi": "mercury_reports bytes[]", + } + wrapped, err := values.NewMap(config) + require.NoError(t, err) + enc, err := evm.NewEVMEncoder(wrapped) + require.NoError(t, err) + + // output of a DF2.0 aggregator + metadata fields appended by OCR + // using an invalid ID + input := map[string]any{ + "mercury_reports": []any{reportA, reportB}, + consensustypes.WorkflowIDFieldName: invalidID, + consensustypes.ExecutionIDFieldName: executionID, + } + wrapped, err = values.NewMap(input) + require.NoError(t, err) + _, err = enc.Encode(testutils.Context(t), *wrapped) + assert.ErrorContains(t, err, "invalid byte") + + // using valid hex string of wrong length + input = map[string]any{ + "mercury_reports": []any{reportA, reportB}, + consensustypes.WorkflowIDFieldName: wrongLength, + consensustypes.ExecutionIDFieldName: executionID, + } + wrapped, err = values.NewMap(input) + require.NoError(t, err) + _, err = enc.Encode(testutils.Context(t), *wrapped) + assert.ErrorContains(t, err, "incorrect length for id") +} diff --git a/core/services/workflows/delegate.go b/core/services/workflows/delegate.go index fb9540844fa..bde7aa275c4 100644 --- a/core/services/workflows/delegate.go +++ 
b/core/services/workflows/delegate.go @@ -3,10 +3,13 @@ package workflows import ( "context" "fmt" + "time" "github.com/google/uuid" "github.com/pelletier/go-toml" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/mercury" + "github.com/smartcontractkit/chainlink-common/pkg/capabilities/triggers" "github.com/smartcontractkit/chainlink-common/pkg/types" "github.com/smartcontractkit/chainlink/v2/core/capabilities/targets" "github.com/smartcontractkit/chainlink/v2/core/chains/legacyevm" @@ -17,12 +20,12 @@ import ( const hardcodedWorkflow = ` triggers: - - type: "on_mercury_report" + - type: "mercury-trigger" config: - feedlist: - - "0x1111111111111111111100000000000000000000000000000000000000000000" # ETHUSD - - "0x2222222222222222222200000000000000000000000000000000000000000000" # LINKUSD - - "0x3333333333333333333300000000000000000000000000000000000000000000" # BTCUSD + feedIds: + - "0x1111111111111111111100000000000000000000000000000000000000000000" + - "0x2222222222222222222200000000000000000000000000000000000000000000" + - "0x3333333333333333333300000000000000000000000000000000000000000000" consensus: - type: "offchain_reporting" @@ -64,8 +67,9 @@ targets: ` type Delegate struct { - registry types.CapabilitiesRegistry - logger logger.Logger + registry types.CapabilitiesRegistry + logger logger.Logger + legacyEVMChains legacyevm.LegacyChainContainer } var _ job.Delegate = (*Delegate)(nil) @@ -84,10 +88,25 @@ func (d *Delegate) OnDeleteJob(ctx context.Context, jb job.Job, q pg.Queryer) er // ServicesForSpec satisfies the job.Delegate interface. 
func (d *Delegate) ServicesForSpec(ctx context.Context, spec job.Job) ([]job.ServiceCtx, error) { + // NOTE: we temporarily do registration inside ServicesForSpec, this will be moved out of job specs in the future + err := targets.InitializeWrite(d.registry, d.legacyEVMChains, d.logger) + if err != nil { + d.logger.Errorw("could not initialize writes", err) + } + + trigger := triggers.NewMercuryTriggerService() + err = d.registry.Add(context.Background(), trigger) + if err != nil { + d.logger.Errorw("could not add mercury trigger to registry", err) + } else { + go mercuryEventLoop(trigger, d.logger) + } + cfg := Config{ - Lggr: d.logger, - Spec: hardcodedWorkflow, - Registry: d.registry, + Lggr: d.logger, + Spec: hardcodedWorkflow, + Registry: d.registry, + WorkflowID: mockedWorkflowID, } engine, err := NewEngine(cfg) if err != nil { @@ -97,10 +116,53 @@ func (d *Delegate) ServicesForSpec(ctx context.Context, spec job.Job) ([]job.Ser } func NewDelegate(logger logger.Logger, registry types.CapabilitiesRegistry, legacyEVMChains legacyevm.LegacyChainContainer) *Delegate { - // NOTE: we temporarily do registration inside NewDelegate, this will be moved out of job specs in the future - _ = targets.InitializeWrite(registry, legacyEVMChains, logger) + return &Delegate{logger: logger, registry: registry, legacyEVMChains: legacyEVMChains} +} + +func mercuryEventLoop(trigger *triggers.MercuryTriggerService, logger logger.Logger) { + sleepSec := 60 * time.Second + ticker := time.NewTicker(sleepSec) + defer ticker.Stop() + + prices := []int64{300000, 2000, 5000000} + + for range ticker.C { + for i := range prices { + prices[i] = prices[i] + 1 + } + + t := time.Now().Round(sleepSec).Unix() + reports, err := emitReports(logger, trigger, t, prices) + if err != nil { + logger.Errorw("failed to process Mercury reports", "err", err, "timestamp", time.Now().Unix(), "payload", reports) + } + } +} + +func emitReports(logger logger.Logger, trigger *triggers.MercuryTriggerService, t 
int64, prices []int64) ([]triggers.FeedReport, error) { + reports := []triggers.FeedReport{ + { + FeedID: mercury.FeedID("0x1111111111111111111100000000000000000000000000000000000000000000").Bytes(), + FullReport: []byte{}, + BenchmarkPrice: prices[0], + ObservationTimestamp: t, + }, + { + FeedID: mercury.FeedID("0x2222222222222222222200000000000000000000000000000000000000000000").Bytes(), + FullReport: []byte{}, + BenchmarkPrice: prices[1], + ObservationTimestamp: t, + }, + { + FeedID: mercury.FeedID("0x3333333333333333333300000000000000000000000000000000000000000000").Bytes(), + FullReport: []byte{}, + BenchmarkPrice: prices[2], + ObservationTimestamp: t, + }, + } - return &Delegate{logger: logger, registry: registry} + logger.Infow("New set of Mercury reports", "timestamp", time.Now().Unix(), "payload", reports) + return reports, trigger.ProcessReport(reports) } func ValidatedWorkflowSpec(tomlString string) (job.Job, error) { diff --git a/core/services/workflows/engine.go b/core/services/workflows/engine.go index 8198152fb14..f3bb9554095 100644 --- a/core/services/workflows/engine.go +++ b/core/services/workflows/engine.go @@ -2,12 +2,12 @@ package workflows import ( "context" + "crypto/sha256" + "encoding/hex" "fmt" "sync" "time" - "github.com/google/uuid" - "github.com/smartcontractkit/chainlink-common/pkg/capabilities" "github.com/smartcontractkit/chainlink-common/pkg/services" "github.com/smartcontractkit/chainlink-common/pkg/types" @@ -17,9 +17,8 @@ import ( const ( // NOTE: max 32 bytes per ID - consider enforcing exactly 32 bytes? - mockedWorkflowID = "aaaaaaaaaa0000000000000000000000" - mockedExecutionID = "bbbbbbbbbb0000000000000000000000" - mockedTriggerID = "cccccccccc0000000000000000000000" + mockedTriggerID = "cccccccccc0000000000000000000000" + mockedWorkflowID = "15c631d295ef5e32deb99a10ee6804bc4af1385568f9b3363f6552ac6dbb2cef" ) // Engine handles the lifecycle of a single workflow and its executions. 
@@ -95,7 +94,7 @@ LOOP: return nil } - // If the capability is already cached, that means we've already registered it + // If the capability already exists, that means we've already registered it if s.capability != nil { return nil } @@ -122,14 +121,14 @@ LOOP: reg := capabilities.RegisterToWorkflowRequest{ Metadata: capabilities.RegistrationMetadata{ - WorkflowID: mockedWorkflowID, + WorkflowID: e.workflow.id, }, Config: s.config, } innerErr = cc.RegisterToWorkflow(ctx, reg) if innerErr != nil { - return fmt.Errorf("failed to register to workflow: %+v", reg) + return fmt.Errorf("failed to register to workflow (%+v): %w", reg, innerErr) } s.capability = cc @@ -177,7 +176,7 @@ func (e *Engine) registerTrigger(ctx context.Context, t *triggerCapability) erro triggerRegRequest := capabilities.CapabilityRequest{ Metadata: capabilities.RequestMetadata{ - WorkflowID: mockedWorkflowID, + WorkflowID: e.workflow.id, }, Config: tc, Inputs: triggerInputs, @@ -217,7 +216,20 @@ func (e *Engine) loop(ctx context.Context) { continue } - err := e.startExecution(ctx, resp.Value) + te := &capabilities.TriggerEvent{} + err := resp.Value.UnwrapTo(te) + if err != nil { + e.logger.Errorf("could not unwrap trigger event", resp.Err) + continue + } + + executionID, err := generateExecutionID(e.workflow.id, te.ID) + if err != nil { + e.logger.Errorf("could not generate execution ID", resp.Err) + continue + } + + err = e.startExecution(ctx, executionID, resp.Value) if err != nil { e.logger.Errorf("failed to start execution: %w", err) } @@ -245,9 +257,23 @@ func (e *Engine) loop(ctx context.Context) { } } +func generateExecutionID(workflowID, eventID string) (string, error) { + s := sha256.New() + _, err := s.Write([]byte(workflowID)) + if err != nil { + return "", err + } + + _, err = s.Write([]byte(eventID)) + if err != nil { + return "", err + } + + return hex.EncodeToString(s.Sum(nil)), nil +} + // startExecution kicks off a new workflow execution when a trigger event is received. 
-func (e *Engine) startExecution(ctx context.Context, event values.Value) error { - executionID := uuid.New().String() +func (e *Engine) startExecution(ctx context.Context, executionID string, event values.Value) error { e.logger.Debugw("executing on a trigger event", "event", event, "executionID", executionID) ec := &executionState{ steps: map[string]*stepState{ @@ -258,7 +284,7 @@ func (e *Engine) startExecution(ctx context.Context, event values.Value) error { status: statusCompleted, }, }, - workflowID: mockedWorkflowID, + workflowID: e.workflow.id, executionID: executionID, status: statusStarted, } @@ -375,6 +401,7 @@ func (e *Engine) queueIfReady(state executionState, step *step) { } func (e *Engine) finishExecution(ctx context.Context, executionID string, status string) error { + e.logger.Infow("finishing execution", "executionID", executionID, "status", status) err := e.executionStates.updateStatus(ctx, executionID, status) if err != nil { return err @@ -404,11 +431,11 @@ func (e *Engine) workerForStepRequest(ctx context.Context, msg stepRequest) { inputs, outputs, err := e.executeStep(ctx, msg) if err != nil { - e.logger.Errorf("error executing step request: %w", err, "executionID", msg.state.executionID, "stepRef", msg.stepRef) + e.logger.Errorf("error executing step request: %s", err, "executionID", msg.state.executionID, "stepRef", msg.stepRef) stepState.outputs.err = err stepState.status = statusErrored } else { - e.logger.Debugw("step executed successfully", "executionID", msg.state.executionID, "stepRef", msg.stepRef, "outputs", outputs) + e.logger.Infow("step executed successfully", "executionID", msg.state.executionID, "stepRef", msg.stepRef, "outputs", outputs) stepState.outputs.value = outputs stepState.status = statusCompleted } @@ -479,7 +506,7 @@ func (e *Engine) deregisterTrigger(ctx context.Context, t *triggerCapability) er } deregRequest := capabilities.CapabilityRequest{ Metadata: capabilities.RequestMetadata{ - WorkflowID: 
mockedWorkflowID, + WorkflowID: e.workflow.id, }, Inputs: triggerInputs, Config: t.config, @@ -511,7 +538,7 @@ func (e *Engine) Close() error { reg := capabilities.UnregisterFromWorkflowRequest{ Metadata: capabilities.RegistrationMetadata{ - WorkflowID: mockedWorkflowID, + WorkflowID: e.workflow.id, }, Config: s.config, } @@ -533,6 +560,7 @@ func (e *Engine) Close() error { type Config struct { Spec string + WorkflowID string Lggr logger.Logger Registry types.CapabilitiesRegistry MaxWorkerLimit int @@ -572,6 +600,8 @@ func NewEngine(cfg Config) (engine *Engine, err error) { return nil, err } + workflow.id = cfg.WorkflowID + // Instantiate semaphore to put a limit on the number of workers newWorkerCh := make(chan struct{}, cfg.MaxWorkerLimit) for i := 0; i < cfg.MaxWorkerLimit; i++ { diff --git a/core/services/workflows/engine_test.go b/core/services/workflows/engine_test.go index e456eefb729..57de4cb6faa 100644 --- a/core/services/workflows/engine_test.go +++ b/core/services/workflows/engine_test.go @@ -120,7 +120,7 @@ func TestEngineWithHardcodedWorkflow(t *testing.T) { const ( simpleWorkflow = ` triggers: - - type: "on_mercury_report" + - type: "mercury-trigger" config: feedlist: - "0x1111111111111111111100000000000000000000000000000000000000000000" # ETHUSD @@ -163,7 +163,7 @@ targets: func mockTrigger(t *testing.T) (capabilities.TriggerCapability, capabilities.CapabilityResponse) { mt := &mockTriggerCapability{ CapabilityInfo: capabilities.MustNewCapabilityInfo( - "on_mercury_report", + "mercury-trigger", capabilities.CapabilityTypeTrigger, "issues a trigger when a mercury report is received.", "v1.0.0", @@ -273,7 +273,7 @@ func TestEngine_ErrorsTheWorkflowIfAStepErrors(t *testing.T) { const ( multiStepWorkflow = ` triggers: - - type: "on_mercury_report" + - type: "mercury-trigger" config: feedlist: - "0x1111111111111111111100000000000000000000000000000000000000000000" # ETHUSD diff --git a/core/services/workflows/models.go b/core/services/workflows/models.go 
index 9c1c56d6054..e6c92a641e4 100644 --- a/core/services/workflows/models.go +++ b/core/services/workflows/models.go @@ -47,6 +47,7 @@ func (w *workflowSpec) steps() []stepDefinition { // treated differently due to their nature of being the starting // point of a workflow. type workflow struct { + id string graph.Graph[string, *step] triggers []*triggerCapability diff --git a/core/services/workflows/testdata/fixtures/workflows/marshalling/workflow_1.yaml b/core/services/workflows/testdata/fixtures/workflows/marshalling/workflow_1.yaml index 0fab758ac44..cbd33f4a90e 100644 --- a/core/services/workflows/testdata/fixtures/workflows/marshalling/workflow_1.yaml +++ b/core/services/workflows/testdata/fixtures/workflows/marshalling/workflow_1.yaml @@ -1,5 +1,5 @@ triggers: - - type: on_mercury_report@1 + - type: mercury-trigger@1 ref: report_data config: boolean_coercion: diff --git a/go.mod b/go.mod index 38f18481e66..32ba665e937 100644 --- a/go.mod +++ b/go.mod @@ -72,7 +72,7 @@ require ( github.com/shopspring/decimal v1.3.1 github.com/smartcontractkit/chain-selectors v1.0.10 github.com/smartcontractkit/chainlink-automation v1.0.2 - github.com/smartcontractkit/chainlink-common v0.1.7-0.20240404141006-77085a02ce25 + github.com/smartcontractkit/chainlink-common v0.1.7-0.20240405173118-f5bf144ec6b3 github.com/smartcontractkit/chainlink-cosmos v0.4.1-0.20240213120401-01a23955f9f8 github.com/smartcontractkit/chainlink-data-streams v0.0.0-20240220203239-09be0ea34540 github.com/smartcontractkit/chainlink-feeds v0.0.0-20240119021347-3c541a78cdb8 diff --git a/go.sum b/go.sum index 3bba85e2c95..628976fa1eb 100644 --- a/go.sum +++ b/go.sum @@ -1182,8 +1182,8 @@ github.com/smartcontractkit/chain-selectors v1.0.10 h1:t9kJeE6B6G+hKD0GYR4kGJSCq github.com/smartcontractkit/chain-selectors v1.0.10/go.mod h1:d4Hi+E1zqjy9HqMkjBE5q1vcG9VGgxf5VxiRHfzi2kE= github.com/smartcontractkit/chainlink-automation v1.0.2 h1:xsfyuswL15q2YBGQT3qn2SBz6fnSKiSW7XZ8IZQLpnI= 
github.com/smartcontractkit/chainlink-automation v1.0.2/go.mod h1:RjboV0Qd7YP+To+OrzHGXaxUxoSONveCoAK2TQ1INLU= -github.com/smartcontractkit/chainlink-common v0.1.7-0.20240404141006-77085a02ce25 h1:fY2wMtlr/VQxPyVVQdi1jFvQHi0VbDnGGVXzLKOZTOY= -github.com/smartcontractkit/chainlink-common v0.1.7-0.20240404141006-77085a02ce25/go.mod h1:kstYjAGqBswdZpl7YkSPeXBDVwaY1VaR6tUMPWl8ykA= +github.com/smartcontractkit/chainlink-common v0.1.7-0.20240405173118-f5bf144ec6b3 h1:LCVHf/ooB4HDkgfLUq+jK4CuCr6SsdNCQZt3/etJ8ms= +github.com/smartcontractkit/chainlink-common v0.1.7-0.20240405173118-f5bf144ec6b3/go.mod h1:kstYjAGqBswdZpl7YkSPeXBDVwaY1VaR6tUMPWl8ykA= github.com/smartcontractkit/chainlink-cosmos v0.4.1-0.20240213120401-01a23955f9f8 h1:I326nw5GwHQHsLKHwtu5Sb9EBLylC8CfUd7BFAS0jtg= github.com/smartcontractkit/chainlink-cosmos v0.4.1-0.20240213120401-01a23955f9f8/go.mod h1:a65NtrK4xZb01mf0dDNghPkN2wXgcqFQ55ADthVBgMc= github.com/smartcontractkit/chainlink-data-streams v0.0.0-20240220203239-09be0ea34540 h1:xFSv8561jsLtF6gYZr/zW2z5qUUAkcFkApin2mnbYTo= diff --git a/integration-tests/go.mod b/integration-tests/go.mod index 81d6a805abd..52b5a006662 100644 --- a/integration-tests/go.mod +++ b/integration-tests/go.mod @@ -24,7 +24,7 @@ require ( github.com/segmentio/ksuid v1.0.4 github.com/slack-go/slack v0.12.2 github.com/smartcontractkit/chainlink-automation v1.0.2 - github.com/smartcontractkit/chainlink-common v0.1.7-0.20240404141006-77085a02ce25 + github.com/smartcontractkit/chainlink-common v0.1.7-0.20240405173118-f5bf144ec6b3 github.com/smartcontractkit/chainlink-testing-framework v1.28.1 github.com/smartcontractkit/chainlink-vrf v0.0.0-20231120191722-fef03814f868 github.com/smartcontractkit/chainlink/v2 v2.0.0-00010101000000-000000000000 diff --git a/integration-tests/go.sum b/integration-tests/go.sum index 16af04e283c..4ce49bd0af4 100644 --- a/integration-tests/go.sum +++ b/integration-tests/go.sum @@ -1521,8 +1521,8 @@ github.com/smartcontractkit/chain-selectors v1.0.10 
h1:t9kJeE6B6G+hKD0GYR4kGJSCq github.com/smartcontractkit/chain-selectors v1.0.10/go.mod h1:d4Hi+E1zqjy9HqMkjBE5q1vcG9VGgxf5VxiRHfzi2kE= github.com/smartcontractkit/chainlink-automation v1.0.2 h1:xsfyuswL15q2YBGQT3qn2SBz6fnSKiSW7XZ8IZQLpnI= github.com/smartcontractkit/chainlink-automation v1.0.2/go.mod h1:RjboV0Qd7YP+To+OrzHGXaxUxoSONveCoAK2TQ1INLU= -github.com/smartcontractkit/chainlink-common v0.1.7-0.20240404141006-77085a02ce25 h1:fY2wMtlr/VQxPyVVQdi1jFvQHi0VbDnGGVXzLKOZTOY= -github.com/smartcontractkit/chainlink-common v0.1.7-0.20240404141006-77085a02ce25/go.mod h1:kstYjAGqBswdZpl7YkSPeXBDVwaY1VaR6tUMPWl8ykA= +github.com/smartcontractkit/chainlink-common v0.1.7-0.20240405173118-f5bf144ec6b3 h1:LCVHf/ooB4HDkgfLUq+jK4CuCr6SsdNCQZt3/etJ8ms= +github.com/smartcontractkit/chainlink-common v0.1.7-0.20240405173118-f5bf144ec6b3/go.mod h1:kstYjAGqBswdZpl7YkSPeXBDVwaY1VaR6tUMPWl8ykA= github.com/smartcontractkit/chainlink-cosmos v0.4.1-0.20240213120401-01a23955f9f8 h1:I326nw5GwHQHsLKHwtu5Sb9EBLylC8CfUd7BFAS0jtg= github.com/smartcontractkit/chainlink-cosmos v0.4.1-0.20240213120401-01a23955f9f8/go.mod h1:a65NtrK4xZb01mf0dDNghPkN2wXgcqFQ55ADthVBgMc= github.com/smartcontractkit/chainlink-data-streams v0.0.0-20240220203239-09be0ea34540 h1:xFSv8561jsLtF6gYZr/zW2z5qUUAkcFkApin2mnbYTo= diff --git a/integration-tests/load/go.mod b/integration-tests/load/go.mod index 4c7c2a1367a..cd321ab240a 100644 --- a/integration-tests/load/go.mod +++ b/integration-tests/load/go.mod @@ -16,7 +16,7 @@ require ( github.com/rs/zerolog v1.30.0 github.com/slack-go/slack v0.12.2 github.com/smartcontractkit/chainlink-automation v1.0.2 - github.com/smartcontractkit/chainlink-common v0.1.7-0.20240404141006-77085a02ce25 + github.com/smartcontractkit/chainlink-common v0.1.7-0.20240405173118-f5bf144ec6b3 github.com/smartcontractkit/chainlink-testing-framework v1.28.1 github.com/smartcontractkit/chainlink/integration-tests v0.0.0-20240214231432-4ad5eb95178c github.com/smartcontractkit/chainlink/v2 
v2.9.0-beta0.0.20240216210048-da02459ddad8 diff --git a/integration-tests/load/go.sum b/integration-tests/load/go.sum index c58a91e9197..71fae4d3857 100644 --- a/integration-tests/load/go.sum +++ b/integration-tests/load/go.sum @@ -1504,8 +1504,8 @@ github.com/smartcontractkit/chain-selectors v1.0.10 h1:t9kJeE6B6G+hKD0GYR4kGJSCq github.com/smartcontractkit/chain-selectors v1.0.10/go.mod h1:d4Hi+E1zqjy9HqMkjBE5q1vcG9VGgxf5VxiRHfzi2kE= github.com/smartcontractkit/chainlink-automation v1.0.2 h1:xsfyuswL15q2YBGQT3qn2SBz6fnSKiSW7XZ8IZQLpnI= github.com/smartcontractkit/chainlink-automation v1.0.2/go.mod h1:RjboV0Qd7YP+To+OrzHGXaxUxoSONveCoAK2TQ1INLU= -github.com/smartcontractkit/chainlink-common v0.1.7-0.20240404141006-77085a02ce25 h1:fY2wMtlr/VQxPyVVQdi1jFvQHi0VbDnGGVXzLKOZTOY= -github.com/smartcontractkit/chainlink-common v0.1.7-0.20240404141006-77085a02ce25/go.mod h1:kstYjAGqBswdZpl7YkSPeXBDVwaY1VaR6tUMPWl8ykA= +github.com/smartcontractkit/chainlink-common v0.1.7-0.20240405173118-f5bf144ec6b3 h1:LCVHf/ooB4HDkgfLUq+jK4CuCr6SsdNCQZt3/etJ8ms= +github.com/smartcontractkit/chainlink-common v0.1.7-0.20240405173118-f5bf144ec6b3/go.mod h1:kstYjAGqBswdZpl7YkSPeXBDVwaY1VaR6tUMPWl8ykA= github.com/smartcontractkit/chainlink-cosmos v0.4.1-0.20240213120401-01a23955f9f8 h1:I326nw5GwHQHsLKHwtu5Sb9EBLylC8CfUd7BFAS0jtg= github.com/smartcontractkit/chainlink-cosmos v0.4.1-0.20240213120401-01a23955f9f8/go.mod h1:a65NtrK4xZb01mf0dDNghPkN2wXgcqFQ55ADthVBgMc= github.com/smartcontractkit/chainlink-data-streams v0.0.0-20240220203239-09be0ea34540 h1:xFSv8561jsLtF6gYZr/zW2z5qUUAkcFkApin2mnbYTo= From a21c5f447990953fd492c91313a4759a35716180 Mon Sep 17 00:00:00 2001 From: Tate Date: Tue, 9 Apr 2024 12:04:33 -0600 Subject: [PATCH 10/10] Update Nix apple sdk usage (#12758) - the previous usage is a pattern that is being deprecated in nix - no longer need to add CGO_ENABLED=0 with this method - bumps flake.lock to get golang 1.21.9 in nix shells --- flake.lock | 12 ++++++------ flake.nix | 5 +---- 
shell.nix | 8 ++++++-- 3 files changed, 13 insertions(+), 12 deletions(-) diff --git a/flake.lock b/flake.lock index bce30e58f58..94ed8931096 100644 --- a/flake.lock +++ b/flake.lock @@ -5,11 +5,11 @@ "systems": "systems" }, "locked": { - "lastModified": 1705309234, - "narHash": "sha256-uNRRNRKmJyCRC/8y1RqBkqWBLM034y4qN7EprSdmgyA=", + "lastModified": 1710146030, + "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", "owner": "numtide", "repo": "flake-utils", - "rev": "1ef2e671c3b0c19053962c07dbda38332dcebf26", + "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", "type": "github" }, "original": { @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1707092692, - "narHash": "sha256-ZbHsm+mGk/izkWtT4xwwqz38fdlwu7nUUKXTOmm4SyE=", + "lastModified": 1712439257, + "narHash": "sha256-aSpiNepFOMk9932HOax0XwNxbA38GOUVOiXfUVPOrck=", "owner": "nixos", "repo": "nixpkgs", - "rev": "faf912b086576fd1a15fca610166c98d47bc667e", + "rev": "ff0dbd94265ac470dda06a657d5fe49de93b4599", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 928b165ca35..b4fd137da79 100644 --- a/flake.nix +++ b/flake.nix @@ -10,12 +10,9 @@ flake-utils.lib.eachDefaultSystem (system: let pkgs = import nixpkgs { inherit system; overlays = [ ]; }; - # The current default sdk for macOS fails to compile go projects, so we use a newer one for now. - # This has no effect on other platforms. - callPackage = pkgs.darwin.apple_sdk_11_0.callPackage or pkgs.callPackage; in rec { - devShell = callPackage ./shell.nix { + devShell = pkgs.callPackage ./shell.nix { inherit pkgs; }; formatter = pkgs.nixpkgs-fmt; diff --git a/shell.nix b/shell.nix index 7b64b7f58a1..ca785283fd5 100644 --- a/shell.nix +++ b/shell.nix @@ -5,8 +5,13 @@ let postgresql = postgresql_14; nodejs = nodejs-18_x; nodePackages = pkgs.nodePackages.override { inherit nodejs; }; + + mkShell' = mkShell.override { + # The current nix default sdk for macOS fails to compile go projects, so we use a newer one for now. 
+ stdenv = if stdenv.isDarwin then overrideSDK stdenv "11.0" else stdenv; + }; in -mkShell { +mkShell' { nativeBuildInputs = [ go goreleaser @@ -46,7 +51,6 @@ mkShell { ]; LD_LIBRARY_PATH = "${stdenv.cc.cc.lib}/lib64:$LD_LIBRARY_PATH"; GOROOT = "${go}/share/go"; - CGO_ENABLED = 0; PGDATA = "db"; CL_DATABASE_URL = "postgresql://chainlink:chainlink@localhost:5432/chainlink_test?sslmode=disable";