From 5b503a3c02801809533012cd73b5f7c492f73ac8 Mon Sep 17 00:00:00 2001 From: Awbrey Hughlett Date: Tue, 7 Jan 2025 12:52:19 -0500 Subject: [PATCH 1/8] Custom Fallback TOML Config (#15617) * Custom Fallback TOML Config This commit provides using an existing env var `CL_CHAIN_DEFAULTS` as a path to a custom `fallback.toml`. This allows plugins to define their own set of fallback options apart from the core node which override the default fallback options. * collapse helper functions into single helper function and reduce indirection * fix test --- .changeset/tall-falcons-yawn.md | 5 + core/chains/evm/config/toml/defaults.go | 146 +++-- .../node/validate/fallback-override.txtar | 552 ++++++++++++++++++ 3 files changed, 640 insertions(+), 63 deletions(-) create mode 100644 .changeset/tall-falcons-yawn.md create mode 100644 testdata/scripts/node/validate/fallback-override.txtar diff --git a/.changeset/tall-falcons-yawn.md b/.changeset/tall-falcons-yawn.md new file mode 100644 index 00000000000..98b90e5994b --- /dev/null +++ b/.changeset/tall-falcons-yawn.md @@ -0,0 +1,5 @@ +--- +"chainlink": patch +--- + +#added the ability to define a fallback.toml override config using CL_CHAIN_DEFAULTS env var diff --git a/core/chains/evm/config/toml/defaults.go b/core/chains/evm/config/toml/defaults.go index 6f03575056b..60da9bded1b 100644 --- a/core/chains/evm/config/toml/defaults.go +++ b/core/chains/evm/config/toml/defaults.go @@ -4,7 +4,7 @@ import ( "bytes" "embed" "fmt" - "io" + "io/fs" "log" "os" "path/filepath" @@ -19,7 +19,6 @@ import ( ) var ( - //go:embed defaults/*.toml defaultsFS embed.FS fallback Chain @@ -33,48 +32,24 @@ var ( ) func init() { - // read the defaults first + var ( + fb *Chain + err error + ) - fes, err := defaultsFS.ReadDir("defaults") + // read all default configs + DefaultIDs, defaultNames, defaults, fb, err = initDefaults(defaultsFS.ReadDir, defaultsFS.ReadFile, "defaults") if err != nil { - log.Fatalf("failed to read defaults/: %v", err) + log.Fatalf("failed to read defaults: %s", err) } - for _, fe := range fes { - path := filepath.Join("defaults", fe.Name()) - b, err2 := defaultsFS.ReadFile(path) - if err2 != nil { - log.Fatalf("failed to read %q: %v", path, err2) - } - var config = struct { - ChainID *big.Big - Chain - }{} - if err3 := cconfig.DecodeTOML(bytes.NewReader(b), &config); err3 != nil { - log.Fatalf("failed to decode %q: %v", path, err3) - } - if fe.Name() == "fallback.toml" { - if config.ChainID != nil { - log.Fatalf("fallback ChainID must be nil, not: %s", config.ChainID) - } - fallback = config.Chain - continue - } - if config.ChainID == nil { - log.Fatalf("missing ChainID: %s", path) - } - DefaultIDs = append(DefaultIDs, config.ChainID) - id := config.ChainID.String() - if _, ok := defaults[id]; ok { - log.Fatalf("%q contains duplicate ChainID: %s", path, id) - } - defaults[id] = config.Chain - defaultNames[id] = strings.ReplaceAll(strings.TrimSuffix(fe.Name(), ".toml"), "_", " ") + if fb == nil { + log.Fatal("failed to set fallback chain config") } - slices.SortFunc(DefaultIDs, func(a, b *big.Big) int { - return a.Cmp(b) - }) + fallback = *fb + + // check for and apply any overrides // read the custom defaults overrides dir := env.CustomDefaults.Get() if dir == "" { @@ -83,54 +58,99 @@ func init() { } // use evm overrides specifically - evmDir := fmt.Sprintf("%s/evm", dir) + _, _, customDefaults, fb, err = initDefaults(os.ReadDir, os.ReadFile, dir+"/evm") + if err != nil { + log.Fatalf("failed to read custom overrides: %s", err) + } - // Read directory 
contents for evm only - entries, err := os.ReadDir(evmDir) + if fb != nil { + fallback = *fb + } +} + +func initDefaults( + dirReader func(name string) ([]fs.DirEntry, error), + fileReader func(name string) ([]byte, error), + root string, +) ([]*big.Big, map[string]string, map[string]Chain, *Chain, error) { + entries, err := dirReader(root) if err != nil { - log.Fatalf("error reading evm custom defaults override directory: %v", err) - return + return nil, nil, nil, nil, fmt.Errorf("failed to read directory: %w", err) } + var fb *Chain + + ids := make([]*big.Big, 0) + configs := make(map[string]Chain) + names := make(map[string]string) + for _, entry := range entries { if entry.IsDir() { // Skip directories continue } - path := evmDir + "/" + entry.Name() - file, err := os.Open(path) - if err != nil { - log.Fatalf("error opening file (name: %v) in custom defaults override directory: %v", entry.Name(), err) - } + // read the file to bytes + path := filepath.Join(root, entry.Name()) - // Read file contents - b, err := io.ReadAll(file) - file.Close() + chainID, chain, err := readConfig(path, fileReader) if err != nil { - log.Fatalf("error reading file (name: %v) contents in custom defaults override directory: %v", entry.Name(), err) + return nil, nil, nil, nil, err } - var config = struct { - ChainID *big.Big - Chain - }{} + if entry.Name() == "fallback.toml" { + if chainID != nil { + return nil, nil, nil, nil, fmt.Errorf("fallback ChainID must be nil: found: %s", chainID) + } + + fb = &chain - if err := cconfig.DecodeTOML(bytes.NewReader(b), &config); err != nil { - log.Fatalf("failed to decode %q in custom defaults override directory: %v", path, err) + continue } - if config.ChainID == nil { - log.Fatalf("missing ChainID in: %s in custom defaults override directory. exiting", path) + // ensure ChainID is set + if chainID == nil { + return nil, nil, nil, nil, fmt.Errorf("missing ChainID: %s", path) } - id := config.ChainID.String() + ids = append(ids, chainID) - if _, ok := customDefaults[id]; ok { + // ChainID as a default should not be duplicated + id := chainID.String() + if _, ok := configs[id]; ok { log.Fatalf("%q contains duplicate ChainID: %s", path, id) } - customDefaults[id] = config.Chain + + // set lookups + configs[id] = chain + names[id] = strings.ReplaceAll(strings.TrimSuffix(entry.Name(), ".toml"), "_", " ") } + + // sort IDs in numeric order + slices.SortFunc(ids, func(a, b *big.Big) int { + return a.Cmp(b) + }) + + return ids, names, configs, fb, nil +} + +func readConfig(path string, reader func(name string) ([]byte, error)) (*big.Big, Chain, error) { + bts, err := reader(path) + if err != nil { + return nil, Chain{}, fmt.Errorf("error reading file: %w", err) + } + + var config = struct { + ChainID *big.Big + Chain + }{} + + // decode from toml to a chain config + if err := cconfig.DecodeTOML(bytes.NewReader(bts), &config); err != nil { + return nil, Chain{}, fmt.Errorf("error in TOML decoding %s: %w", path, err) + } + + return config.ChainID, config.Chain, nil } // DefaultsNamed returns the default Chain values, optionally for the given chainID, as well as a name if the chainID is known. 
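
As a usage sketch (not part of the patch itself), the snippet below shows how the override directory consumed by the new initDefaults path is expected to be laid out, mirroring the txtar test that follows. The directory path, file contents, and chain ID here are illustrative assumptions, not values taken from the patch.

package main

import (
	"log"
	"os"
	"path/filepath"
)

func main() {
	// CL_CHAIN_DEFAULTS points at a parent directory; the EVM defaults loader
	// reads overrides from its evm/ subdirectory (see initDefaults above).
	root := "default_overrides" // hypothetical path

	if err := os.MkdirAll(filepath.Join(root, "evm"), 0o755); err != nil {
		log.Fatal(err)
	}

	// evm/fallback.toml must omit ChainID and replaces the embedded fallback
	// wholesale, so a real override is typically a full copy of the defaults
	// with edits; abbreviated here for illustration only.
	fallback := []byte("FinalityDepth = 100\nLogPollInterval = '30s'\n")
	if err := os.WriteFile(filepath.Join(root, "evm", "fallback.toml"), fallback, 0o644); err != nil {
		log.Fatal(err)
	}

	// Per-chain override files must set ChainID (e.g. evm/1.toml for mainnet);
	// files without a ChainID other than fallback.toml are rejected at startup.
	chain1 := []byte("ChainID = '1'\nFinalityDepth = 64\n")
	if err := os.WriteFile(filepath.Join(root, "evm", "1.toml"), chain1, 0o644); err != nil {
		log.Fatal(err)
	}

	// In practice CL_CHAIN_DEFAULTS is exported in the node's environment (as in
	// the txtar test below); set here only to show the expected value.
	os.Setenv("CL_CHAIN_DEFAULTS", root)
	log.Printf("CL_CHAIN_DEFAULTS=%s", os.Getenv("CL_CHAIN_DEFAULTS"))
}

The txtar fixture in the next file diff exercises the same mechanism end to end: it runs `chainlink node ... validate` once with CL_CHAIN_DEFAULTS unset and once pointing at default_overrides, and asserts that the effective configuration differs.
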
diff --git a/testdata/scripts/node/validate/fallback-override.txtar b/testdata/scripts/node/validate/fallback-override.txtar new file mode 100644 index 00000000000..91feb48693d --- /dev/null +++ b/testdata/scripts/node/validate/fallback-override.txtar @@ -0,0 +1,552 @@ +# test with defaults +env CL_CHAIN_DEFAULTS= +exec chainlink node -c config.toml -s secrets.toml validate +cmp stdout out.txt + +# test with fallback override +env CL_CHAIN_DEFAULTS=default_overrides +exec chainlink node -c config.toml -s secrets.toml validate +! cmp stdout out.txt + +-- default_overrides/evm/fallback.toml -- +AutoCreateKey = true +BlockBackfillDepth = 1000000 +BlockBackfillSkip = false +FinalityDepth = 50 +FinalityTagEnabled = false +LogBackfillBatchSize = 1000 +LogPollInterval = '15s' +LogKeepBlocksDepth = 100000 +LogPrunePageSize = 0 +BackupLogPollerBlockDelay = 100 +MinContractPayment = '.00001 link' +MinIncomingConfirmations = 3 +NonceAutoSync = true +NoNewHeadsThreshold = '3m' +RPCDefaultBatchSize = 250 +RPCBlockQueryDelay = 1 +FinalizedBlockOffset = 0 +NoNewFinalizedHeadsThreshold = '0' +LogBroadcasterEnabled = true + +[Transactions] +ForwardersEnabled = false +MaxInFlight = 16 +MaxQueued = 250 +ReaperInterval = '1h' +ReaperThreshold = '168h' +ResendAfterThreshold = '1m' + +[Transactions.AutoPurge] +Enabled = false + +[BalanceMonitor] +Enabled = true + +[GasEstimator] +Mode = 'BlockHistory' +PriceDefault = '20 gwei' +PriceMax = '115792089237316195423570985008687907853269984665.640564039457584007913129639935 tether' +PriceMin = '1 gwei' +LimitDefault = 500_000 +LimitMax = 500_000 +LimitMultiplier = '1' +LimitTransfer = 21_000 +BumpMin = '5 gwei' +BumpPercent = 20 +BumpThreshold = 3 +EIP1559DynamicFees = false +FeeCapDefault = '100 gwei' +TipCapDefault = '1' +TipCapMin = '1' +EstimateLimit = false + +[GasEstimator.BlockHistory] +BatchSize = 25 +BlockHistorySize = 8 +CheckInclusionBlocks = 12 +CheckInclusionPercentile = 90 +TransactionPercentile = 60 + +[GasEstimator.FeeHistory] +CacheTimeout = '10s' + +[HeadTracker] +HistoryDepth = 100 +MaxBufferSize = 3 +SamplingInterval = '1s' +FinalityTagBypass = true +MaxAllowedFinalityDepth = 10000 +PersistenceEnabled = true + +[NodePool] +PollFailureThreshold = 5 +PollInterval = '10s' +SelectionMode = 'HighestHead' +SyncThreshold = 5 +LeaseDuration = '0s' +NodeIsSyncingEnabled = false +FinalizedBlockPollInterval = '5s' +EnforceRepeatableRead = true +DeathDeclarationDelay = '1m' +NewHeadsPollInterval = '0s' + +[OCR] +ContractConfirmations = 4 +ContractTransmitterTransmitTimeout = '10s' +DatabaseTimeout = '10s' +DeltaCOverride = '168h' +DeltaCJitterOverride = '1h' +ObservationGracePeriod = '1s' + +[OCR2.Automation] +GasLimit = 5400000 + +[Workflow] +GasLimitDefault = 400_000 + +-- config.toml -- +Log.Level = 'debug' + +[[EVM]] +ChainID = '1' + +[[EVM.Nodes]] +Name = 'fake' +WSURL = 'wss://foo.bar/ws' +HTTPURL = 'https://foo.bar' + +-- secrets.toml -- +[Database] +URL = 'postgresql://user:pass1234567890abcd@localhost:5432/dbname?sslmode=disable' + +[Password] +Keystore = 'keystore_pass' + +-- out.txt -- +# Secrets: +[Database] +URL = 'xxxxx' +AllowSimplePasswords = false + +[Password] +Keystore = 'xxxxx' + +# Input Configuration: +[Log] +Level = 'debug' + +[[EVM]] +ChainID = '1' + +[[EVM.Nodes]] +Name = 'fake' +WSURL = 'wss://foo.bar/ws' +HTTPURL = 'https://foo.bar' + +# Effective Configuration, with defaults applied: +InsecureFastScrypt = false +RootDir = '~/.chainlink' +ShutdownGracePeriod = '5s' + +[Feature] +FeedsManager = true +LogPoller = false +UICSAKeys = 
false +CCIP = true +MultiFeedsManagers = false + +[Database] +DefaultIdleInTxSessionTimeout = '1h0m0s' +DefaultLockTimeout = '15s' +DefaultQueryTimeout = '10s' +LogQueries = false +MaxIdleConns = 10 +MaxOpenConns = 100 +MigrateOnStartup = true + +[Database.Backup] +Dir = '' +Frequency = '1h0m0s' +Mode = 'none' +OnVersionUpgrade = true + +[Database.Listener] +MaxReconnectDuration = '10m0s' +MinReconnectInterval = '1m0s' +FallbackPollInterval = '30s' + +[Database.Lock] +Enabled = true +LeaseDuration = '10s' +LeaseRefreshInterval = '1s' + +[TelemetryIngress] +UniConn = false +Logging = false +BufferSize = 100 +MaxBatchSize = 50 +SendInterval = '500ms' +SendTimeout = '10s' +UseBatchSend = true + +[AuditLogger] +Enabled = false +ForwardToUrl = '' +JsonWrapperKey = '' +Headers = [] + +[Log] +Level = 'debug' +JSONConsole = false +UnixTS = false + +[Log.File] +Dir = '' +MaxSize = '5.12gb' +MaxAgeDays = 0 +MaxBackups = 1 + +[WebServer] +AuthenticationMethod = 'local' +AllowOrigins = 'http://localhost:3000,http://localhost:6688' +BridgeResponseURL = '' +BridgeCacheTTL = '0s' +HTTPWriteTimeout = '10s' +HTTPPort = 6688 +SecureCookies = true +SessionTimeout = '15m0s' +SessionReaperExpiration = '240h0m0s' +HTTPMaxSize = '32.77kb' +StartTimeout = '15s' +ListenIP = '0.0.0.0' + +[WebServer.LDAP] +ServerTLS = true +SessionTimeout = '15m0s' +QueryTimeout = '2m0s' +BaseUserAttr = 'uid' +BaseDN = '' +UsersDN = 'ou=users' +GroupsDN = 'ou=groups' +ActiveAttribute = '' +ActiveAttributeAllowedValue = '' +AdminUserGroupCN = 'NodeAdmins' +EditUserGroupCN = 'NodeEditors' +RunUserGroupCN = 'NodeRunners' +ReadUserGroupCN = 'NodeReadOnly' +UserApiTokenEnabled = false +UserAPITokenDuration = '240h0m0s' +UpstreamSyncInterval = '0s' +UpstreamSyncRateLimit = '2m0s' + +[WebServer.MFA] +RPID = '' +RPOrigin = '' + +[WebServer.RateLimit] +Authenticated = 1000 +AuthenticatedPeriod = '1m0s' +Unauthenticated = 5 +UnauthenticatedPeriod = '20s' + +[WebServer.TLS] +CertPath = '' +ForceRedirect = false +Host = '' +HTTPSPort = 6689 +KeyPath = '' +ListenIP = '0.0.0.0' + +[JobPipeline] +ExternalInitiatorsEnabled = false +MaxRunDuration = '10m0s' +MaxSuccessfulRuns = 10000 +ReaperInterval = '1h0m0s' +ReaperThreshold = '24h0m0s' +ResultWriteQueueDepth = 100 +VerboseLogging = true + +[JobPipeline.HTTPRequest] +DefaultTimeout = '15s' +MaxSize = '32.77kb' + +[FluxMonitor] +DefaultTransactionQueueDepth = 1 +SimulateTransactions = false + +[OCR2] +Enabled = false +ContractConfirmations = 3 +BlockchainTimeout = '20s' +ContractPollInterval = '1m0s' +ContractSubscribeInterval = '2m0s' +ContractTransmitterTransmitTimeout = '10s' +DatabaseTimeout = '10s' +KeyBundleID = '0000000000000000000000000000000000000000000000000000000000000000' +CaptureEATelemetry = false +CaptureAutomationCustomTelemetry = true +DefaultTransactionQueueDepth = 1 +SimulateTransactions = false +TraceLogging = false + +[OCR] +Enabled = false +ObservationTimeout = '5s' +BlockchainTimeout = '20s' +ContractPollInterval = '1m0s' +ContractSubscribeInterval = '2m0s' +DefaultTransactionQueueDepth = 1 +KeyBundleID = '0000000000000000000000000000000000000000000000000000000000000000' +SimulateTransactions = false +TransmitterAddress = '' +CaptureEATelemetry = false +TraceLogging = false + +[P2P] +IncomingMessageBufferSize = 10 +OutgoingMessageBufferSize = 10 +PeerID = '' +TraceLogging = false + +[P2P.V2] +Enabled = true +AnnounceAddresses = [] +DefaultBootstrappers = [] +DeltaDial = '15s' +DeltaReconcile = '1m0s' +ListenAddresses = [] + +[Keeper] +DefaultTransactionQueueDepth = 1 
+GasPriceBufferPercent = 20 +GasTipCapBufferPercent = 20 +BaseFeeBufferPercent = 20 +MaxGracePeriod = 100 +TurnLookBack = 1000 + +[Keeper.Registry] +CheckGasOverhead = 200000 +PerformGasOverhead = 300000 +MaxPerformDataSize = 5000 +SyncInterval = '30m0s' +SyncUpkeepQueueSize = 10 + +[AutoPprof] +Enabled = false +ProfileRoot = '' +PollInterval = '10s' +GatherDuration = '10s' +GatherTraceDuration = '5s' +MaxProfileSize = '100.00mb' +CPUProfileRate = 1 +MemProfileRate = 1 +BlockProfileRate = 1 +MutexProfileFraction = 1 +MemThreshold = '4.00gb' +GoroutineThreshold = 5000 + +[Pyroscope] +ServerAddress = '' +Environment = 'mainnet' + +[Sentry] +Debug = false +DSN = '' +Environment = '' +Release = '' + +[Insecure] +DevWebServer = false +OCRDevelopmentMode = false +InfiniteDepthQueries = false +DisableRateLimiting = false + +[Tracing] +Enabled = false +CollectorTarget = '' +NodeID = '' +SamplingRatio = 0.0 +Mode = 'tls' +TLSCertPath = '' + +[Mercury] +VerboseLogging = false + +[Mercury.Cache] +LatestReportTTL = '1s' +MaxStaleAge = '1h0m0s' +LatestReportDeadline = '5s' + +[Mercury.TLS] +CertFile = '' + +[Mercury.Transmitter] +TransmitQueueMaxSize = 10000 +TransmitTimeout = '5s' +TransmitConcurrency = 100 + +[Capabilities] +[Capabilities.Peering] +IncomingMessageBufferSize = 10 +OutgoingMessageBufferSize = 10 +PeerID = '' +TraceLogging = false + +[Capabilities.Peering.V2] +Enabled = false +AnnounceAddresses = [] +DefaultBootstrappers = [] +DeltaDial = '15s' +DeltaReconcile = '1m0s' +ListenAddresses = [] + +[Capabilities.Dispatcher] +SupportedVersion = 1 +ReceiverBufferSize = 10000 + +[Capabilities.Dispatcher.RateLimit] +GlobalRPS = 800.0 +GlobalBurst = 1000 +PerSenderRPS = 10.0 +PerSenderBurst = 50 + +[Capabilities.ExternalRegistry] +Address = '' +NetworkID = 'evm' +ChainID = '1' + +[Capabilities.WorkflowRegistry] +Address = '' +NetworkID = 'evm' +ChainID = '1' + +[Capabilities.GatewayConnector] +ChainIDForNodeKey = '' +NodeAddress = '' +DonID = '' +WSHandshakeTimeoutMillis = 0 +AuthMinChallengeLen = 0 +AuthTimestampToleranceSec = 0 + +[[Capabilities.GatewayConnector.Gateways]] +ID = '' +URL = '' + +[Telemetry] +Enabled = false +CACertFile = '' +Endpoint = '' +InsecureConnection = false +TraceSampleRatio = 0.01 +EmitterBatchProcessor = true +EmitterExportTimeout = '1s' + +[[EVM]] +ChainID = '1' +AutoCreateKey = true +BlockBackfillDepth = 10 +BlockBackfillSkip = false +FinalityDepth = 50 +FinalityTagEnabled = true +LinkContractAddress = '0x514910771AF9Ca656af840dff83E8264EcF986CA' +LogBackfillBatchSize = 1000 +LogPollInterval = '15s' +LogKeepBlocksDepth = 100000 +LogPrunePageSize = 0 +BackupLogPollerBlockDelay = 100 +MinIncomingConfirmations = 3 +MinContractPayment = '0.1 link' +NonceAutoSync = true +NoNewHeadsThreshold = '3m0s' +OperatorFactoryAddress = '0x3E64Cd889482443324F91bFA9c84fE72A511f48A' +LogBroadcasterEnabled = true +RPCDefaultBatchSize = 250 +RPCBlockQueryDelay = 1 +FinalizedBlockOffset = 0 +NoNewFinalizedHeadsThreshold = '9m0s' + +[EVM.Transactions] +Enabled = true +ForwardersEnabled = false +MaxInFlight = 16 +MaxQueued = 250 +ReaperInterval = '1h0m0s' +ReaperThreshold = '168h0m0s' +ResendAfterThreshold = '1m0s' + +[EVM.Transactions.AutoPurge] +Enabled = false + +[EVM.BalanceMonitor] +Enabled = true + +[EVM.GasEstimator] +Mode = 'BlockHistory' +PriceDefault = '20 gwei' +PriceMax = '115792089237316195423570985008687907853269984665.640564039457584007913129639935 tether' +PriceMin = '1 gwei' +LimitDefault = 500000 +LimitMax = 500000 +LimitMultiplier = '1' +LimitTransfer = 21000 
+EstimateLimit = false +BumpMin = '5 gwei' +BumpPercent = 20 +BumpThreshold = 3 +EIP1559DynamicFees = true +FeeCapDefault = '100 gwei' +TipCapDefault = '1 wei' +TipCapMin = '1 wei' + +[EVM.GasEstimator.BlockHistory] +BatchSize = 25 +BlockHistorySize = 4 +CheckInclusionBlocks = 12 +CheckInclusionPercentile = 90 +TransactionPercentile = 50 + +[EVM.GasEstimator.FeeHistory] +CacheTimeout = '10s' + +[EVM.HeadTracker] +HistoryDepth = 100 +MaxBufferSize = 3 +SamplingInterval = '1s' +MaxAllowedFinalityDepth = 10000 +FinalityTagBypass = true +PersistenceEnabled = true + +[EVM.NodePool] +PollFailureThreshold = 5 +PollInterval = '10s' +SelectionMode = 'HighestHead' +SyncThreshold = 5 +LeaseDuration = '0s' +NodeIsSyncingEnabled = false +FinalizedBlockPollInterval = '5s' +EnforceRepeatableRead = true +DeathDeclarationDelay = '1m0s' +NewHeadsPollInterval = '0s' + +[EVM.OCR] +ContractConfirmations = 4 +ContractTransmitterTransmitTimeout = '10s' +DatabaseTimeout = '10s' +DeltaCOverride = '168h0m0s' +DeltaCJitterOverride = '1h0m0s' +ObservationGracePeriod = '1s' + +[EVM.OCR2] +[EVM.OCR2.Automation] +GasLimit = 10500000 + +[EVM.Workflow] +GasLimitDefault = 400000 + +[[EVM.Nodes]] +Name = 'fake' +WSURL = 'wss://foo.bar/ws' +HTTPURL = 'https://foo.bar' + +Valid configuration. From 99f21e9b99b51874a7f2b730ead592adabd379c3 Mon Sep 17 00:00:00 2001 From: Jordan Krage Date: Tue, 7 Jan 2025 13:22:20 -0600 Subject: [PATCH 2/8] deployment: golangci-lint run --fix (#15838) --- .golangci.yml | 1 - deployment/.golangci.yml | 1 - deployment/address_book.go | 10 ++--- deployment/ccip/changeset/cs_ccip_home.go | 45 ++++++++++--------- .../ccip/changeset/cs_ccip_home_test.go | 4 +- .../ccip/changeset/cs_chain_contracts.go | 9 ++-- .../ccip/changeset/cs_chain_contracts_test.go | 4 +- deployment/ccip/changeset/cs_deploy_chain.go | 17 +++---- deployment/ccip/changeset/cs_home_chain.go | 18 ++++---- .../ccip/changeset/cs_home_chain_test.go | 4 +- .../ccip/changeset/cs_update_rmn_config.go | 7 +-- deployment/ccip/changeset/state.go | 3 +- deployment/ccip/changeset/test_assertions.go | 11 ++--- deployment/ccip/changeset/test_environment.go | 10 ++--- deployment/ccip/changeset/test_helpers.go | 10 ++--- .../ccip/changeset/test_usdc_helpers.go | 4 +- deployment/ccip/changeset/v1_5/cs_jobspec.go | 5 ++- .../ccip/view/v1_0/rmn_proxy_contract.go | 3 +- deployment/ccip/view/v1_2/price_registry.go | 3 +- .../ccip/view/v1_2/price_registry_test.go | 8 ++-- deployment/ccip/view/v1_5/commit_store.go | 3 +- deployment/ccip/view/v1_5/offramp.go | 3 +- deployment/ccip/view/v1_5/offramp_test.go | 2 +- deployment/ccip/view/v1_5/onramp.go | 3 +- deployment/ccip/view/v1_5/onramp_test.go | 3 +- deployment/ccip/view/v1_5/rmn.go | 3 +- deployment/ccip/view/v1_5/rmn_test.go | 4 +- .../ccip/view/v1_5/tokenadminregistry.go | 3 +- deployment/ccip/view/v1_6/ccip_home.go | 3 +- deployment/ccip/view/v1_6/ccip_home_test.go | 2 +- deployment/ccip/view/v1_6/rmnhome.go | 1 + .../common/changeset/deploy_link_token.go | 4 +- .../example/add_mint_burners_link.go | 2 - .../common/changeset/example/link_transfer.go | 3 -- .../changeset/example/link_transfer_test.go | 1 - .../common/changeset/example/mint_link.go | 2 - deployment/common/changeset/internal/mcms.go | 6 +-- deployment/common/changeset/save_existing.go | 6 +-- .../common/changeset/set_config_mcms.go | 1 - .../common/changeset/set_config_mcms_test.go | 4 +- deployment/common/changeset/test_helpers.go | 1 + .../transfer_to_mcms_with_timelock.go | 16 +++---- .../common/proposalutils/mcms_helpers.go | 11 
++--- deployment/common/proposalutils/propose.go | 4 +- deployment/common/types/types.go | 26 +++++------ deployment/common/view/nops.go | 6 ++- deployment/common/view/v1_0/capreg_test.go | 4 +- deployment/common/view/v1_0/link_token.go | 4 +- .../common/view/v1_0/link_token_test.go | 12 ++--- .../common/view/v1_0/static_link_token.go | 4 +- .../view/v1_0/static_link_token_test.go | 6 +-- deployment/environment.go | 3 +- deployment/environment/crib/types.go | 1 + deployment/environment/devenv/chain.go | 3 +- deployment/environment/devenv/don.go | 6 ++- deployment/environment/devenv/don_test.go | 1 - deployment/environment/devenv/environment.go | 5 ++- deployment/environment/devenv/jd.go | 5 ++- deployment/environment/memory/job_client.go | 30 ++++++------- .../environment/nodeclient/chainlink.go | 11 ++--- .../environment/web/sdk/client/client.go | 38 ++++++++-------- .../environment/web/sdk/client/types.go | 4 +- deployment/evm_kmsclient.go | 15 ++++--- deployment/helpers.go | 3 +- .../changeset/append_node_capabilities.go | 4 +- .../append_node_capabilities_test.go | 7 ++- .../keystone/changeset/deploy_consumer.go | 3 +- .../changeset/deploy_consumer_test.go | 2 +- .../keystone/changeset/deploy_forwarder.go | 6 ++- .../changeset/deploy_forwarder_test.go | 11 ++--- deployment/keystone/changeset/deploy_ocr3.go | 8 ++-- .../keystone/changeset/deploy_ocr3_test.go | 7 +-- .../keystone/changeset/deploy_registry.go | 3 +- .../changeset/deploy_registry_test.go | 2 +- .../internal/append_node_capabilities.go | 5 ++- .../internal/capability_management.go | 1 + .../changeset/internal/contract_set.go | 1 - .../keystone/changeset/internal/deploy.go | 10 ++--- .../changeset/internal/forwarder_deployer.go | 1 + .../changeset/internal/ocr3_deployer.go | 1 + .../keystone/changeset/internal/ocr3config.go | 4 +- .../changeset/internal/ocr3config_test.go | 2 +- .../keystone/changeset/internal/types.go | 13 +++--- .../keystone/changeset/internal/types_test.go | 7 +-- .../keystone/changeset/internal/update_don.go | 8 ++-- .../changeset/internal/update_don_test.go | 7 ++- .../internal/update_node_capabilities.go | 5 ++- .../changeset/internal/update_nodes.go | 3 +- .../changeset/internal/update_nodes_test.go | 2 - deployment/keystone/changeset/update_don.go | 11 +++-- .../keystone/changeset/update_don_test.go | 6 +-- .../changeset/update_node_capabilities.go | 6 ++- .../update_node_capabilities_test.go | 9 ++-- deployment/keystone/changeset/update_nodes.go | 6 ++- .../keystone/changeset/update_nodes_test.go | 7 ++- deployment/keystone/changeset/view.go | 1 - .../changeset/workflowregistry/deploy.go | 3 +- .../changeset/workflowregistry/deploy_test.go | 2 +- .../changeset/workflowregistry/setup_test.go | 3 +- .../update_allowed_dons_test.go | 6 +-- .../update_authorized_addresses_test.go | 4 +- .../workflow_registry_deployer.go | 1 - deployment/multiclient_test.go | 4 +- integration-tests/.golangci.yml | 1 - 104 files changed, 330 insertions(+), 318 deletions(-) diff --git a/.golangci.yml b/.golangci.yml index 63b061c2951..d35b6459e05 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -8,7 +8,6 @@ linters: - errname - errorlint - exhaustive - - exportloopref - fatcontext - ginkgolinter - gocritic diff --git a/deployment/.golangci.yml b/deployment/.golangci.yml index ff1303e26ce..7341210ce00 100644 --- a/deployment/.golangci.yml +++ b/deployment/.golangci.yml @@ -8,7 +8,6 @@ linters: - errname - errorlint - exhaustive - - exportloopref - fatcontext - ginkgolinter - gocritic diff --git a/deployment/address_book.go 
b/deployment/address_book.go index 3ce0332a4c3..fde0adc2d97 100644 --- a/deployment/address_book.go +++ b/deployment/address_book.go @@ -14,9 +14,9 @@ import ( ) var ( - ErrInvalidChainSelector = fmt.Errorf("invalid chain selector") - ErrInvalidAddress = fmt.Errorf("invalid address") - ErrChainNotFound = fmt.Errorf("chain not found") + ErrInvalidChainSelector = errors.New("invalid chain selector") + ErrInvalidAddress = errors.New("invalid address") + ErrChainNotFound = errors.New("chain not found") ) // ContractType is a simple string type for identifying contract types. @@ -117,7 +117,7 @@ func (m *AddressBookMap) save(chainSelector uint64, address string, typeAndVersi // TODO NONEVM-960: Add validation for non-EVM chain addresses if typeAndVersion.Type == "" { - return fmt.Errorf("type cannot be empty") + return errors.New("type cannot be empty") } if _, exists := m.addressesByChain[chainSelector]; !exists { @@ -256,7 +256,7 @@ func SearchAddressBook(ab AddressBook, chain uint64, typ ContractType) (string, } } - return "", fmt.Errorf("not found") + return "", errors.New("not found") } func AddressBookContains(ab AddressBook, chain uint64, addrToFind string) (bool, error) { diff --git a/deployment/ccip/changeset/cs_ccip_home.go b/deployment/ccip/changeset/cs_ccip_home.go index 0c82afee261..7d3327a31f2 100644 --- a/deployment/ccip/changeset/cs_ccip_home.go +++ b/deployment/ccip/changeset/cs_ccip_home.go @@ -3,6 +3,7 @@ package changeset import ( "bytes" "encoding/hex" + "errors" "fmt" "math/big" "os" @@ -139,7 +140,7 @@ func (p PromoteCandidatesChangesetConfig) Validate(e deployment.Environment) ([] if p.PluginType != types.PluginTypeCCIPCommit && p.PluginType != types.PluginTypeCCIPExec { - return nil, fmt.Errorf("PluginType must be set to either CCIPCommit or CCIPExec") + return nil, errors.New("PluginType must be set to either CCIPCommit or CCIPExec") } var donIDs []uint32 @@ -153,7 +154,7 @@ func (p PromoteCandidatesChangesetConfig) Validate(e deployment.Environment) ([] } if chainState.OffRamp == nil { // should not be possible, but a defensive check. - return nil, fmt.Errorf("OffRamp contract does not exist") + return nil, errors.New("OffRamp contract does not exist") } donID, err := internal.DonIDForChain( @@ -182,13 +183,13 @@ func (p PromoteCandidatesChangesetConfig) Validate(e deployment.Environment) ([] donIDs = append(donIDs, donID) } if len(e.NodeIDs) == 0 { - return nil, fmt.Errorf("NodeIDs must be set") + return nil, errors.New("NodeIDs must be set") } if state.Chains[p.HomeChainSelector].CCIPHome == nil { - return nil, fmt.Errorf("CCIPHome contract does not exist") + return nil, errors.New("CCIPHome contract does not exist") } if state.Chains[p.HomeChainSelector].CapabilityRegistry == nil { - return nil, fmt.Errorf("CapabilityRegistry contract does not exist") + return nil, errors.New("CapabilityRegistry contract does not exist") } return donIDs, nil @@ -316,7 +317,7 @@ func (s SetCandidateConfigBase) Validate(e deployment.Environment, state CCIPOnC } if s.PluginType != types.PluginTypeCCIPCommit && s.PluginType != types.PluginTypeCCIPExec { - return fmt.Errorf("PluginType must be set to either CCIPCommit or CCIPExec") + return errors.New("PluginType must be set to either CCIPCommit or CCIPExec") } // no donID check since this config is used for both adding a new DON and updating an existing one. 
@@ -340,17 +341,17 @@ func (s SetCandidateConfigBase) Validate(e deployment.Environment, state CCIPOnC // TODO: validate gas config in the chain config in cciphome for this RemoteChainSelectors. } if len(e.NodeIDs) == 0 { - return fmt.Errorf("nodeIDs must be set") + return errors.New("nodeIDs must be set") } if state.Chains[s.HomeChainSelector].CCIPHome == nil { - return fmt.Errorf("CCIPHome contract does not exist") + return errors.New("CCIPHome contract does not exist") } if state.Chains[s.HomeChainSelector].CapabilityRegistry == nil { - return fmt.Errorf("CapabilityRegistry contract does not exist") + return errors.New("CapabilityRegistry contract does not exist") } if e.OCRSecrets.IsEmpty() { - return fmt.Errorf("OCR secrets must be set") + return errors.New("OCR secrets must be set") } return nil @@ -443,7 +444,7 @@ func AddDonAndSetCandidateChangeset( pluginOCR3Config, ok := newDONArgs[cfg.PluginType] if !ok { - return deployment.ChangesetOutput{}, fmt.Errorf("missing commit plugin in ocr3Configs") + return deployment.ChangesetOutput{}, errors.New("missing commit plugin in ocr3Configs") } expectedDonID := latestDon.Id + 1 @@ -476,7 +477,7 @@ func AddDonAndSetCandidateChangeset( ChainIdentifier: mcms.ChainIdentifier(cfg.HomeChainSelector), Batch: donOps, }}, - fmt.Sprintf("addDON on new Chain && setCandidate for plugin %s", cfg.PluginType.String()), + "addDON on new Chain && setCandidate for plugin "+cfg.PluginType.String(), cfg.MCMS.MinDelay, ) if err != nil { @@ -671,7 +672,7 @@ func setCandidateOnExistingDon( mcmsEnabled bool, ) ([]mcms.Operation, error) { if donID == 0 { - return nil, fmt.Errorf("donID is zero") + return nil, errors.New("donID is zero") } encodedSetCandidateCall, err := internal.CCIPHomeABI.Pack( @@ -791,7 +792,7 @@ func promoteAllCandidatesForChainOps( mcmsEnabled bool, ) (mcms.Operation, error) { if donID == 0 { - return mcms.Operation{}, fmt.Errorf("donID is zero") + return mcms.Operation{}, errors.New("donID is zero") } updatePluginOp, err := promoteCandidateOp( @@ -831,13 +832,13 @@ func (r RevokeCandidateChangesetConfig) Validate(e deployment.Environment, state return 0, fmt.Errorf("don chain selector invalid: %w", err) } if len(e.NodeIDs) == 0 { - return 0, fmt.Errorf("NodeIDs must be set") + return 0, errors.New("NodeIDs must be set") } if state.Chains[r.HomeChainSelector].CCIPHome == nil { - return 0, fmt.Errorf("CCIPHome contract does not exist") + return 0, errors.New("CCIPHome contract does not exist") } if state.Chains[r.HomeChainSelector].CapabilityRegistry == nil { - return 0, fmt.Errorf("CapabilityRegistry contract does not exist") + return 0, errors.New("CapabilityRegistry contract does not exist") } homeChainState, exists := state.Chains[r.HomeChainSelector] if !exists { @@ -866,7 +867,7 @@ func (r RevokeCandidateChangesetConfig) Validate(e deployment.Environment, state return 0, fmt.Errorf("fetching candidate digest from cciphome: %w", err) } if candidateDigest == [32]byte{} { - return 0, fmt.Errorf("candidate config digest is zero, can't revoke it") + return 0, errors.New("candidate config digest is zero, can't revoke it") } return donID, nil @@ -947,7 +948,7 @@ func revokeCandidateOps( mcmsEnabled bool, ) ([]mcms.Operation, error) { if donID == 0 { - return nil, fmt.Errorf("donID is zero") + return nil, errors.New("donID is zero") } candidateDigest, err := ccipHome.GetCandidateDigest(nil, donID, pluginType) @@ -1017,7 +1018,7 @@ func (c UpdateChainConfigConfig) Validate(e deployment.Environment) error { return fmt.Errorf("home chain selector 
invalid: %w", err) } if len(c.RemoteChainRemoves) == 0 && len(c.RemoteChainAdds) == 0 { - return fmt.Errorf("no chain adds or removes") + return errors.New("no chain adds or removes") } homeChainState, exists := state.Chains[c.HomeChainSelector] if !exists { @@ -1042,10 +1043,10 @@ func (c UpdateChainConfigConfig) Validate(e deployment.Environment) error { return fmt.Errorf("chain to add %d is not supported", add) } if ccfg.FChain == 0 { - return fmt.Errorf("FChain must be set") + return errors.New("FChain must be set") } if len(ccfg.Readers) == 0 { - return fmt.Errorf("Readers must be set") + return errors.New("Readers must be set") } } return nil diff --git a/deployment/ccip/changeset/cs_ccip_home_test.go b/deployment/ccip/changeset/cs_ccip_home_test.go index dae32557f8b..eb22f05a703 100644 --- a/deployment/ccip/changeset/cs_ccip_home_test.go +++ b/deployment/ccip/changeset/cs_ccip_home_test.go @@ -459,7 +459,7 @@ func Test_UpdateChainConfigs(t *testing.T) { ccipHome := state.Chains[tenv.HomeChainSel].CCIPHome otherChainConfig, err := ccipHome.GetChainConfig(nil, otherChain) require.NoError(t, err) - assert.True(t, otherChainConfig.FChain != 0) + assert.NotZero(t, otherChainConfig.FChain) var mcmsConfig *MCMSConfig if tc.mcmsEnabled { @@ -488,7 +488,7 @@ func Test_UpdateChainConfigs(t *testing.T) { // other chain should be gone chainConfigAfter, err := ccipHome.GetChainConfig(nil, otherChain) require.NoError(t, err) - assert.True(t, chainConfigAfter.FChain == 0) + assert.Zero(t, chainConfigAfter.FChain) // Lets add it back now. _, err = commonchangeset.ApplyChangesets(t, tenv.Env, map[uint64]*proposalutils.TimelockExecutionContracts{ diff --git a/deployment/ccip/changeset/cs_chain_contracts.go b/deployment/ccip/changeset/cs_chain_contracts.go index f85814f1768..e87e66e06b5 100644 --- a/deployment/ccip/changeset/cs_chain_contracts.go +++ b/deployment/ccip/changeset/cs_chain_contracts.go @@ -221,7 +221,7 @@ func UpdateNonceManagersCS(e deployment.Environment, cfg UpdateNonceManagerConfi type UpdateOnRampDestsConfig struct { UpdatesByChain map[uint64]map[uint64]OnRampDestinationUpdate // Disallow mixing MCMS/non-MCMS per chain for simplicity. - // (can still be acheived by calling this function multiple times) + // (can still be achieved by calling this function multiple times) MCMS *MCMSConfig } @@ -265,7 +265,7 @@ func (cfg UpdateOnRampDestsConfig) Validate(e deployment.Environment) error { return fmt.Errorf("failed to get onramp static config %s: %w", chainState.OnRamp.Address(), err) } if destination == sc.ChainSelector { - return fmt.Errorf("cannot update onramp destination to the same chain") + return errors.New("cannot update onramp destination to the same chain") } } } @@ -514,7 +514,7 @@ func UpdateFeeQuoterPricesCS(e deployment.Environment, cfg UpdateFeeQuoterPrices type UpdateFeeQuoterDestsConfig struct { UpdatesByChain map[uint64]map[uint64]fee_quoter.FeeQuoterDestChainConfig // Disallow mixing MCMS/non-MCMS per chain for simplicity. 
- // (can still be acheived by calling this function multiple times) + // (can still be achieved by calling this function multiple times) MCMS *MCMSConfig } @@ -552,7 +552,7 @@ func (cfg UpdateFeeQuoterDestsConfig) Validate(e deployment.Environment) error { return fmt.Errorf("failed to get onramp static config %s: %w", chainState.OnRamp.Address(), err) } if destination == sc.ChainSelector { - return fmt.Errorf("source and destination chain cannot be the same") + return errors.New("source and destination chain cannot be the same") } } } @@ -824,7 +824,6 @@ func (cfg UpdateRouterRampsConfig) Validate(e deployment.Environment) error { return fmt.Errorf("missing offramp for dest %d", destination) } } - } return nil } diff --git a/deployment/ccip/changeset/cs_chain_contracts_test.go b/deployment/ccip/changeset/cs_chain_contracts_test.go index 0a1e0ce3b7b..adbcc078373 100644 --- a/deployment/ccip/changeset/cs_chain_contracts_test.go +++ b/deployment/ccip/changeset/cs_chain_contracts_test.go @@ -81,11 +81,11 @@ func TestUpdateOnRampsDests(t *testing.T) { sourceCfg, err := state.Chains[source].OnRamp.GetDestChainConfig(&bind.CallOpts{Context: ctx}, dest) require.NoError(t, err) require.Equal(t, state.Chains[source].TestRouter.Address(), sourceCfg.Router) - require.Equal(t, false, sourceCfg.AllowlistEnabled) + require.False(t, sourceCfg.AllowlistEnabled) destCfg, err := state.Chains[dest].OnRamp.GetDestChainConfig(&bind.CallOpts{Context: ctx}, source) require.NoError(t, err) require.Equal(t, state.Chains[dest].Router.Address(), destCfg.Router) - require.Equal(t, true, destCfg.AllowlistEnabled) + require.True(t, destCfg.AllowlistEnabled) }) } } diff --git a/deployment/ccip/changeset/cs_deploy_chain.go b/deployment/ccip/changeset/cs_deploy_chain.go index 5a6085202a9..68655377f2e 100644 --- a/deployment/ccip/changeset/cs_deploy_chain.go +++ b/deployment/ccip/changeset/cs_deploy_chain.go @@ -1,6 +1,7 @@ package changeset import ( + "errors" "fmt" "math/big" @@ -81,7 +82,7 @@ func deployChainContractsForChains( capReg := existingState.Chains[homeChainSel].CapabilityRegistry if capReg == nil { e.Logger.Errorw("Failed to get capability registry") - return fmt.Errorf("capability registry not found") + return errors.New("capability registry not found") } cr, err := capReg.GetHashedCapabilityId( &bind.CallOpts{}, internal.CapabilityLabelledName, internal.CapabilityVersion) @@ -105,12 +106,12 @@ func deployChainContractsForChains( return err } if ccipHome.Address() != existingState.Chains[homeChainSel].CCIPHome.Address() { - return fmt.Errorf("ccip home address mismatch") + return errors.New("ccip home address mismatch") } rmnHome := existingState.Chains[homeChainSel].RMNHome if rmnHome == nil { e.Logger.Errorw("Failed to get rmn home", "err", err) - return fmt.Errorf("rmn home not found") + return errors.New("rmn home not found") } deployGrp := errgroup.Group{} for _, chainSel := range chainsToDeploy { @@ -203,7 +204,7 @@ func deployChainContracts( rmnLegacyAddr, ) return deployment.ContractDeploy[*rmn_remote.RMNRemote]{ - rmnRemoteAddr, rmnRemote, tx, deployment.NewTypeAndVersion(RMNRemote, deployment.Version1_6_0_dev), err2, + Address: rmnRemoteAddr, Contract: rmnRemote, Tx: tx, Tv: deployment.NewTypeAndVersion(RMNRemote, deployment.Version1_6_0_dev), Err: err2, } }) if err != nil { @@ -243,7 +244,7 @@ func deployChainContracts( RMNProxy.Address(), ) return deployment.ContractDeploy[*router.Router]{ - routerAddr, routerC, tx2, deployment.NewTypeAndVersion(TestRouter, deployment.Version1_2_0), err2, + 
Address: routerAddr, Contract: routerC, Tx: tx2, Tv: deployment.NewTypeAndVersion(TestRouter, deployment.Version1_2_0), Err: err2, } }) if err != nil { @@ -264,7 +265,7 @@ func deployChainContracts( []common.Address{}, // Need to add onRamp after ) return deployment.ContractDeploy[*nonce_manager.NonceManager]{ - nonceManagerAddr, nonceManager, tx2, deployment.NewTypeAndVersion(NonceManager, deployment.Version1_6_0_dev), err2, + Address: nonceManagerAddr, Contract: nonceManager, Tx: tx2, Tv: deployment.NewTypeAndVersion(NonceManager, deployment.Version1_6_0_dev), Err: err2, } }) if err != nil { @@ -304,7 +305,7 @@ func deployChainContracts( []fee_quoter.FeeQuoterDestChainConfigArgs{}, ) return deployment.ContractDeploy[*fee_quoter.FeeQuoter]{ - prAddr, pr, tx2, deployment.NewTypeAndVersion(FeeQuoter, deployment.Version1_6_0_dev), err2, + Address: prAddr, Contract: pr, Tx: tx2, Tv: deployment.NewTypeAndVersion(FeeQuoter, deployment.Version1_6_0_dev), Err: err2, } }) if err != nil { @@ -335,7 +336,7 @@ func deployChainContracts( []onramp.OnRampDestChainConfigArgs{}, ) return deployment.ContractDeploy[*onramp.OnRamp]{ - onRampAddr, onRamp, tx2, deployment.NewTypeAndVersion(OnRamp, deployment.Version1_6_0_dev), err2, + Address: onRampAddr, Contract: onRamp, Tx: tx2, Tv: deployment.NewTypeAndVersion(OnRamp, deployment.Version1_6_0_dev), Err: err2, } }) if err != nil { diff --git a/deployment/ccip/changeset/cs_home_chain.go b/deployment/ccip/changeset/cs_home_chain.go index b92a8d132a4..3b985f5c526 100644 --- a/deployment/ccip/changeset/cs_home_chain.go +++ b/deployment/ccip/changeset/cs_home_chain.go @@ -61,23 +61,23 @@ type DeployHomeChainConfig struct { func (c DeployHomeChainConfig) Validate() error { if c.HomeChainSel == 0 { - return fmt.Errorf("home chain selector must be set") + return errors.New("home chain selector must be set") } if c.RMNDynamicConfig.OffchainConfig == nil { - return fmt.Errorf("offchain config for RMNHomeDynamicConfig must be set") + return errors.New("offchain config for RMNHomeDynamicConfig must be set") } if c.RMNStaticConfig.OffchainConfig == nil { - return fmt.Errorf("offchain config for RMNHomeStaticConfig must be set") + return errors.New("offchain config for RMNHomeStaticConfig must be set") } if len(c.NodeOperators) == 0 { - return fmt.Errorf("node operators must be set") + return errors.New("node operators must be set") } for _, nop := range c.NodeOperators { if nop.Admin == (common.Address{}) { - return fmt.Errorf("node operator admin address must be set") + return errors.New("node operator admin address must be set") } if nop.Name == "" { - return fmt.Errorf("node operator name must be set") + return errors.New("node operator name must be set") } if len(c.NodeP2PIDsPerNodeOpAdmin[nop.Name]) == 0 { return fmt.Errorf("node operator %s must have node p2p ids provided", nop.Name) @@ -338,14 +338,14 @@ func (c RemoveDONsConfig) Validate(homeChain CCIPChainState) error { return fmt.Errorf("home chain selector must be set %w", err) } if len(c.DonIDs) == 0 { - return fmt.Errorf("don ids must be set") + return errors.New("don ids must be set") } // Cap reg must exist if homeChain.CapabilityRegistry == nil { - return fmt.Errorf("cap reg does not exist") + return errors.New("cap reg does not exist") } if homeChain.CCIPHome == nil { - return fmt.Errorf("ccip home does not exist") + return errors.New("ccip home does not exist") } if err := internal.DONIdExists(homeChain.CapabilityRegistry, c.DonIDs); err != nil { return err diff --git 
a/deployment/ccip/changeset/cs_home_chain_test.go b/deployment/ccip/changeset/cs_home_chain_test.go index 8a2d4f87709..e96cd878305 100644 --- a/deployment/ccip/changeset/cs_home_chain_test.go +++ b/deployment/ccip/changeset/cs_home_chain_test.go @@ -52,12 +52,12 @@ func TestDeployHomeChain(t *testing.T) { capRegSnap, ok := snap[chainName].CapabilityRegistry[state.Chains[homeChainSel].CapabilityRegistry.Address().String()] require.True(t, ok) require.NotNil(t, capRegSnap) - require.Equal(t, capRegSnap.Nops, []v1_0.NopView{ + require.Equal(t, []v1_0.NopView{ { Admin: e.Chains[homeChainSel].DeployerKey.From, Name: "NodeOperator", }, - }) + }, capRegSnap.Nops) require.Len(t, capRegSnap.Nodes, len(p2pIds)) } diff --git a/deployment/ccip/changeset/cs_update_rmn_config.go b/deployment/ccip/changeset/cs_update_rmn_config.go index 96f8eacb4cc..337b3756881 100644 --- a/deployment/ccip/changeset/cs_update_rmn_config.go +++ b/deployment/ccip/changeset/cs_update_rmn_config.go @@ -1,6 +1,7 @@ package changeset import ( + "errors" "fmt" "math/big" "reflect" @@ -178,14 +179,14 @@ func (c SetRMNHomeCandidateConfig) Validate(state CCIPOnChainState) error { } if len(c.RMNDynamicConfig.OffchainConfig) != 0 { - return fmt.Errorf("RMNDynamicConfig.OffchainConfig must be empty") + return errors.New("RMNDynamicConfig.OffchainConfig must be empty") } if len(c.RMNStaticConfig.OffchainConfig) != 0 { - return fmt.Errorf("RMNStaticConfig.OffchainConfig must be empty") + return errors.New("RMNStaticConfig.OffchainConfig must be empty") } if len(c.RMNStaticConfig.Nodes) > 256 { - return fmt.Errorf("RMNStaticConfig.Nodes must be less than 256") + return errors.New("RMNStaticConfig.Nodes must be less than 256") } var ( diff --git a/deployment/ccip/changeset/state.go b/deployment/ccip/changeset/state.go index b50724eaa16..aa07168a6d2 100644 --- a/deployment/ccip/changeset/state.go +++ b/deployment/ccip/changeset/state.go @@ -2,6 +2,7 @@ package changeset import ( "fmt" + "strconv" "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" @@ -366,7 +367,7 @@ func (s CCIPOnChainState) View(chains []uint64) (map[string]view.ChainView, erro } name := chainInfo.ChainName if chainInfo.ChainName == "" { - name = fmt.Sprintf("%d", chainSelector) + name = strconv.FormatUint(chainSelector, 10) } m[name] = chainView } diff --git a/deployment/ccip/changeset/test_assertions.go b/deployment/ccip/changeset/test_assertions.go index bcfb49250d4..c83d6e3597a 100644 --- a/deployment/ccip/changeset/test_assertions.go +++ b/deployment/ccip/changeset/test_assertions.go @@ -2,6 +2,7 @@ package changeset import ( "context" + "errors" "fmt" "math/big" "sync" @@ -360,12 +361,12 @@ func ConfirmCommitWithExpectedSeqNumRange( if mr.SourceChainSelector == src.Selector && uint64(expectedSeqNumRange.Start()) >= mr.MinSeqNr && uint64(expectedSeqNumRange.End()) <= mr.MaxSeqNr { - t.Logf("All sequence numbers commited in a single report [%d, %d]", expectedSeqNumRange.Start(), expectedSeqNumRange.End()) + t.Logf("All sequence numbers committed in a single report [%d, %d]", expectedSeqNumRange.Start(), expectedSeqNumRange.End()) return event, nil } if !enforceSingleCommit && seenMessages.allCommited(src.Selector) { - t.Logf("All sequence numbers already commited from range [%d, %d]", expectedSeqNumRange.Start(), expectedSeqNumRange.End()) + t.Logf("All sequence numbers already committed from range [%d, %d]", expectedSeqNumRange.Start(), expectedSeqNumRange.End()) return event, nil } } @@ -389,12 +390,12 @@ func ConfirmCommitWithExpectedSeqNumRange( 
if mr.SourceChainSelector == src.Selector && uint64(expectedSeqNumRange.Start()) >= mr.MinSeqNr && uint64(expectedSeqNumRange.End()) <= mr.MaxSeqNr { - t.Logf("All sequence numbers commited in a single report [%d, %d]", expectedSeqNumRange.Start(), expectedSeqNumRange.End()) + t.Logf("All sequence numbers committed in a single report [%d, %d]", expectedSeqNumRange.Start(), expectedSeqNumRange.End()) return report, nil } if !enforceSingleCommit && seenMessages.allCommited(src.Selector) { - t.Logf("All sequence numbers already commited from range [%d, %d]", expectedSeqNumRange.Start(), expectedSeqNumRange.End()) + t.Logf("All sequence numbers already committed from range [%d, %d]", expectedSeqNumRange.Start(), expectedSeqNumRange.End()) return report, nil } } @@ -482,7 +483,7 @@ func ConfirmExecWithSeqNrs( expectedSeqNrs []uint64, ) (executionStates map[uint64]int, err error) { if len(expectedSeqNrs) == 0 { - return nil, fmt.Errorf("no expected sequence numbers provided") + return nil, errors.New("no expected sequence numbers provided") } timer := time.NewTimer(8 * time.Minute) diff --git a/deployment/ccip/changeset/test_environment.go b/deployment/ccip/changeset/test_environment.go index f723efbf619..a1307d9820b 100644 --- a/deployment/ccip/changeset/test_environment.go +++ b/deployment/ccip/changeset/test_environment.go @@ -2,7 +2,7 @@ package changeset import ( "context" - "fmt" + "errors" "math/big" "os" "testing" @@ -61,16 +61,16 @@ type TestConfigs struct { func (tc *TestConfigs) Validate() error { if tc.Chains < 2 { - return fmt.Errorf("chains must be at least 2") + return errors.New("chains must be at least 2") } if tc.Nodes < 4 { - return fmt.Errorf("nodes must be at least 4") + return errors.New("nodes must be at least 4") } if tc.Bootstraps < 1 { - return fmt.Errorf("bootstraps must be at least 1") + return errors.New("bootstraps must be at least 1") } if tc.Type == Memory && tc.RMNEnabled { - return fmt.Errorf("cannot run RMN tests in memory mode") + return errors.New("cannot run RMN tests in memory mode") } return nil } diff --git a/deployment/ccip/changeset/test_helpers.go b/deployment/ccip/changeset/test_helpers.go index 2069030191c..03c3ffb175d 100644 --- a/deployment/ccip/changeset/test_helpers.go +++ b/deployment/ccip/changeset/test_helpers.go @@ -582,7 +582,7 @@ func deploySingleFeed( deployFunc func(deployment.Chain) deployment.ContractDeploy[*aggregator_v3_interface.AggregatorV3Interface], symbol TokenSymbol, ) (common.Address, string, error) { - //tokenTV := deployment.NewTypeAndVersion(PriceFeed, deployment.Version1_0_0) + // tokenTV := deployment.NewTypeAndVersion(PriceFeed, deployment.Version1_0_0) mockTokenFeed, err := deployment.DeployContract(lggr, chain, ab, deployFunc) if err != nil { lggr.Errorw("Failed to deploy token feed", "err", err, "symbol", symbol) @@ -736,7 +736,7 @@ func deployTokenPoolsInParallel( return nil, nil, nil, nil, err } if srcToken == nil || srcPool == nil || dstToken == nil || dstPool == nil { - return nil, nil, nil, nil, fmt.Errorf("failed to deploy token and pool") + return nil, nil, nil, nil, errors.New("failed to deploy token and pool") } return srcToken, srcPool, dstToken, dstPool, nil } @@ -763,7 +763,7 @@ func setUSDCTokenPoolCounterPart( var fixedAddr [32]byte copy(fixedAddr[:], allowedCaller[:32]) - domain, _ := reader.AllAvailableDomains()[destChainSelector] + domain := reader.AllAvailableDomains()[destChainSelector] domains := []usdc_token_pool.USDCTokenPoolDomainUpdate{ { @@ -917,7 +917,7 @@ func deployTransferTokenOneEnd( 
big.NewInt(0).Mul(big.NewInt(1e9), big.NewInt(1e18)), ) return deployment.ContractDeploy[*burn_mint_erc677.BurnMintERC677]{ - tokenAddress, token, tx, deployment.NewTypeAndVersion(BurnMintToken, deployment.Version1_0_0), err2, + Address: tokenAddress, Contract: token, Tx: tx, Tv: deployment.NewTypeAndVersion(BurnMintToken, deployment.Version1_0_0), Err: err2, } }) if err != nil { @@ -946,7 +946,7 @@ func deployTransferTokenOneEnd( common.HexToAddress(routerAddress), ) return deployment.ContractDeploy[*burn_mint_token_pool.BurnMintTokenPool]{ - tokenPoolAddress, tokenPoolContract, tx, deployment.NewTypeAndVersion(BurnMintTokenPool, deployment.Version1_5_1), err2, + Address: tokenPoolAddress, Contract: tokenPoolContract, Tx: tx, Tv: deployment.NewTypeAndVersion(BurnMintTokenPool, deployment.Version1_5_1), Err: err2, } }) if err != nil { diff --git a/deployment/ccip/changeset/test_usdc_helpers.go b/deployment/ccip/changeset/test_usdc_helpers.go index 55f1bd25a36..c9dd87b866e 100644 --- a/deployment/ccip/changeset/test_usdc_helpers.go +++ b/deployment/ccip/changeset/test_usdc_helpers.go @@ -115,8 +115,8 @@ func UpdateFeeQuoterForUSDC( DestChainSelector: dstChain, TokenTransferFeeConfigs: []fee_quoter.FeeQuoterTokenTransferFeeConfigSingleTokenArgs{ { - usdcToken.Address(), - fee_quoter.FeeQuoterTokenTransferFeeConfig{ + Token: usdcToken.Address(), + TokenTransferFeeConfig: fee_quoter.FeeQuoterTokenTransferFeeConfig{ MinFeeUSDCents: 50, MaxFeeUSDCents: 50_000, DeciBps: 0, diff --git a/deployment/ccip/changeset/v1_5/cs_jobspec.go b/deployment/ccip/changeset/v1_5/cs_jobspec.go index bdb36d531f8..e1cd73f1e30 100644 --- a/deployment/ccip/changeset/v1_5/cs_jobspec.go +++ b/deployment/ccip/changeset/v1_5/cs_jobspec.go @@ -1,6 +1,7 @@ package v1_5 import ( + "errors" "fmt" "github.com/smartcontractkit/chainlink/deployment" @@ -41,14 +42,14 @@ func (j JobSpecInput) Validate() error { return fmt.Errorf("DestinationChainSelector is invalid: %w", err) } if j.TokenPricesUSDPipeline == "" && j.PriceGetterConfigJson == "" { - return fmt.Errorf("TokenPricesUSDPipeline or PriceGetterConfigJson is required") + return errors.New("TokenPricesUSDPipeline or PriceGetterConfigJson is required") } if j.USDCCfg != nil { if err := j.USDCCfg.ValidateUSDCConfig(); err != nil { return fmt.Errorf("USDCCfg is invalid: %w", err) } if j.USDCAttestationAPI == "" { - return fmt.Errorf("USDCAttestationAPI is required") + return errors.New("USDCAttestationAPI is required") } } return nil diff --git a/deployment/ccip/view/v1_0/rmn_proxy_contract.go b/deployment/ccip/view/v1_0/rmn_proxy_contract.go index 818b9fcac93..5a2ea2807f6 100644 --- a/deployment/ccip/view/v1_0/rmn_proxy_contract.go +++ b/deployment/ccip/view/v1_0/rmn_proxy_contract.go @@ -1,6 +1,7 @@ package v1_0 import ( + "errors" "fmt" "github.com/ethereum/go-ethereum/common" @@ -16,7 +17,7 @@ type RMNProxyView struct { func GenerateRMNProxyView(r *rmn_proxy_contract.RMNProxy) (RMNProxyView, error) { if r == nil { - return RMNProxyView{}, fmt.Errorf("cannot generate view for nil RMNProxy") + return RMNProxyView{}, errors.New("cannot generate view for nil RMNProxy") } meta, err := types.NewContractMetaData(r, r.Address()) if err != nil { diff --git a/deployment/ccip/view/v1_2/price_registry.go b/deployment/ccip/view/v1_2/price_registry.go index ee0f1067b6c..269c48fccaf 100644 --- a/deployment/ccip/view/v1_2/price_registry.go +++ b/deployment/ccip/view/v1_2/price_registry.go @@ -1,6 +1,7 @@ package v1_2 import ( + "errors" "fmt" "github.com/ethereum/go-ethereum/common" @@ 
-18,7 +19,7 @@ type PriceRegistryView struct { func GeneratePriceRegistryView(pr *price_registry_1_2_0.PriceRegistry) (PriceRegistryView, error) { if pr == nil { - return PriceRegistryView{}, fmt.Errorf("cannot generate view for nil PriceRegistry") + return PriceRegistryView{}, errors.New("cannot generate view for nil PriceRegistry") } meta, err := types.NewContractMetaData(pr, pr.Address()) if err != nil { diff --git a/deployment/ccip/view/v1_2/price_registry_test.go b/deployment/ccip/view/v1_2/price_registry_test.go index cbcdbe253ce..8248f55335b 100644 --- a/deployment/ccip/view/v1_2/price_registry_test.go +++ b/deployment/ccip/view/v1_2/price_registry_test.go @@ -29,10 +29,10 @@ func TestGeneratePriceRegistryView(t *testing.T) { v, err := GeneratePriceRegistryView(c) require.NoError(t, err) assert.Equal(t, v.Owner, chain.DeployerKey.From) - assert.Equal(t, v.TypeAndVersion, "PriceRegistry 1.2.0") - assert.Equal(t, v.FeeTokens, []common.Address{f1, f2}) - assert.Equal(t, v.StalenessThreshold, "10") - assert.Equal(t, v.Updaters, []common.Address{chain.DeployerKey.From}) + assert.Equal(t, "PriceRegistry 1.2.0", v.TypeAndVersion) + assert.Equal(t, []common.Address{f1, f2}, v.FeeTokens) + assert.Equal(t, "10", v.StalenessThreshold) + assert.Equal(t, []common.Address{chain.DeployerKey.From}, v.Updaters) _, err = json.MarshalIndent(v, "", " ") require.NoError(t, err) } diff --git a/deployment/ccip/view/v1_5/commit_store.go b/deployment/ccip/view/v1_5/commit_store.go index ffea3b61f5f..396aa8b737a 100644 --- a/deployment/ccip/view/v1_5/commit_store.go +++ b/deployment/ccip/view/v1_5/commit_store.go @@ -1,6 +1,7 @@ package v1_5 import ( + "errors" "fmt" "github.com/ethereum/go-ethereum/common" @@ -24,7 +25,7 @@ type CommitStoreView struct { func GenerateCommitStoreView(c *commit_store.CommitStore) (CommitStoreView, error) { if c == nil { - return CommitStoreView{}, fmt.Errorf("cannot generate view for nil CommitStore") + return CommitStoreView{}, errors.New("cannot generate view for nil CommitStore") } meta, err := types.NewContractMetaData(c, c.Address()) if err != nil { diff --git a/deployment/ccip/view/v1_5/offramp.go b/deployment/ccip/view/v1_5/offramp.go index 95e40d9da27..95d92c445e4 100644 --- a/deployment/ccip/view/v1_5/offramp.go +++ b/deployment/ccip/view/v1_5/offramp.go @@ -1,6 +1,7 @@ package v1_5 import ( + "errors" "fmt" "github.com/smartcontractkit/chainlink/deployment/common/view/types" @@ -15,7 +16,7 @@ type OffRampView struct { func GenerateOffRampView(r *evm_2_evm_offramp.EVM2EVMOffRamp) (OffRampView, error) { if r == nil { - return OffRampView{}, fmt.Errorf("cannot generate view for nil OffRamp") + return OffRampView{}, errors.New("cannot generate view for nil OffRamp") } meta, err := types.NewContractMetaData(r, r.Address()) if err != nil { diff --git a/deployment/ccip/view/v1_5/offramp_test.go b/deployment/ccip/view/v1_5/offramp_test.go index d6539fe2ba5..47370501424 100644 --- a/deployment/ccip/view/v1_5/offramp_test.go +++ b/deployment/ccip/view/v1_5/offramp_test.go @@ -54,7 +54,7 @@ func TestOffRampView(t *testing.T) { v, err := GenerateOffRampView(c2) require.NoError(t, err) assert.Equal(t, v.StaticConfig, sc) - assert.Equal(t, v.TypeAndVersion, "EVM2EVMOffRamp 1.5.0") + assert.Equal(t, "EVM2EVMOffRamp 1.5.0", v.TypeAndVersion) _, err = json.MarshalIndent(v, "", " ") require.NoError(t, err) } diff --git a/deployment/ccip/view/v1_5/onramp.go b/deployment/ccip/view/v1_5/onramp.go index d679f6c14c0..c211c493cbc 100644 --- a/deployment/ccip/view/v1_5/onramp.go +++ 
b/deployment/ccip/view/v1_5/onramp.go @@ -1,6 +1,7 @@ package v1_5 import ( + "errors" "fmt" "github.com/smartcontractkit/chainlink/deployment/common/view/types" @@ -15,7 +16,7 @@ type OnRampView struct { func GenerateOnRampView(r *evm_2_evm_onramp.EVM2EVMOnRamp) (OnRampView, error) { if r == nil { - return OnRampView{}, fmt.Errorf("cannot generate view for nil OnRamp") + return OnRampView{}, errors.New("cannot generate view for nil OnRamp") } meta, err := types.NewContractMetaData(r, r.Address()) if err != nil { diff --git a/deployment/ccip/view/v1_5/onramp_test.go b/deployment/ccip/view/v1_5/onramp_test.go index 4d7ef0225a6..6ce21c9f032 100644 --- a/deployment/ccip/view/v1_5/onramp_test.go +++ b/deployment/ccip/view/v1_5/onramp_test.go @@ -64,8 +64,7 @@ func TestOnRampView(t *testing.T) { // Check a few fields. assert.Equal(t, v.StaticConfig.ChainSelector, chain.Selector) assert.Equal(t, v.DynamicConfig.Router, common.HexToAddress("0x4")) - assert.Equal(t, v.TypeAndVersion, "EVM2EVMOnRamp 1.5.0") + assert.Equal(t, "EVM2EVMOnRamp 1.5.0", v.TypeAndVersion) _, err = json.MarshalIndent(v, "", " ") require.NoError(t, err) - } diff --git a/deployment/ccip/view/v1_5/rmn.go b/deployment/ccip/view/v1_5/rmn.go index cef55460446..19535cf508e 100644 --- a/deployment/ccip/view/v1_5/rmn.go +++ b/deployment/ccip/view/v1_5/rmn.go @@ -1,6 +1,7 @@ package v1_5 import ( + "errors" "fmt" "github.com/smartcontractkit/chainlink/deployment/common/view/types" @@ -14,7 +15,7 @@ type RMNView struct { func GenerateRMNView(r *rmn_contract.RMNContract) (RMNView, error) { if r == nil { - return RMNView{}, fmt.Errorf("cannot generate view for nil RMN") + return RMNView{}, errors.New("cannot generate view for nil RMN") } meta, err := types.NewContractMetaData(r, r.Address()) if err != nil { diff --git a/deployment/ccip/view/v1_5/rmn_test.go b/deployment/ccip/view/v1_5/rmn_test.go index 3ec7d7a9cc9..f4ea35a116f 100644 --- a/deployment/ccip/view/v1_5/rmn_test.go +++ b/deployment/ccip/view/v1_5/rmn_test.go @@ -45,8 +45,8 @@ func TestGenerateRMNView(t *testing.T) { v, err := GenerateRMNView(c) require.NoError(t, err) assert.Equal(t, v.Owner, chain.DeployerKey.From) - assert.Equal(t, v.TypeAndVersion, "RMN 1.5.0") - assert.Equal(t, v.ConfigDetails.Version, uint32(1)) + assert.Equal(t, "RMN 1.5.0", v.TypeAndVersion) + assert.Equal(t, uint32(1), v.ConfigDetails.Version) assert.Equal(t, v.ConfigDetails.Config, cfg) _, err = json.MarshalIndent(v, "", " ") require.NoError(t, err) diff --git a/deployment/ccip/view/v1_5/tokenadminregistry.go b/deployment/ccip/view/v1_5/tokenadminregistry.go index 2fd36615bcd..e4a88996247 100644 --- a/deployment/ccip/view/v1_5/tokenadminregistry.go +++ b/deployment/ccip/view/v1_5/tokenadminregistry.go @@ -1,6 +1,7 @@ package v1_5 import ( + "errors" "fmt" "github.com/ethereum/go-ethereum/common" @@ -20,7 +21,7 @@ type TokenAdminRegistryView struct { func GenerateTokenAdminRegistryView(taContract *token_admin_registry.TokenAdminRegistry) (TokenAdminRegistryView, error) { if taContract == nil { - return TokenAdminRegistryView{}, fmt.Errorf("token admin registry contract is nil") + return TokenAdminRegistryView{}, errors.New("token admin registry contract is nil") } tokens, err := getAllConfiguredTokensPaginated(taContract) if err != nil { diff --git a/deployment/ccip/view/v1_6/ccip_home.go b/deployment/ccip/view/v1_6/ccip_home.go index b188c32c079..04b7dc8c1af 100644 --- a/deployment/ccip/view/v1_6/ccip_home.go +++ b/deployment/ccip/view/v1_6/ccip_home.go @@ -1,6 +1,7 @@ package v1_6 import ( + 
"errors" "fmt" "math/big" @@ -33,7 +34,7 @@ type CCIPHomeView struct { func GenerateCCIPHomeView(cr *capabilities_registry.CapabilitiesRegistry, ch *ccip_home.CCIPHome) (CCIPHomeView, error) { if ch == nil { - return CCIPHomeView{}, fmt.Errorf("cannot generate view for nil CCIPHome") + return CCIPHomeView{}, errors.New("cannot generate view for nil CCIPHome") } meta, err := types.NewContractMetaData(ch, ch.Address()) if err != nil { diff --git a/deployment/ccip/view/v1_6/ccip_home_test.go b/deployment/ccip/view/v1_6/ccip_home_test.go index 8ea79e8eac3..3d4701d705a 100644 --- a/deployment/ccip/view/v1_6/ccip_home_test.go +++ b/deployment/ccip/view/v1_6/ccip_home_test.go @@ -33,7 +33,7 @@ func TestCCIPHomeView(t *testing.T) { v, err := GenerateCCIPHomeView(cr, ch) require.NoError(t, err) - assert.Equal(t, v.TypeAndVersion, "CCIPHome 1.6.0-dev") + assert.Equal(t, "CCIPHome 1.6.0-dev", v.TypeAndVersion) _, err = json.MarshalIndent(v, "", " ") require.NoError(t, err) diff --git a/deployment/ccip/view/v1_6/rmnhome.go b/deployment/ccip/view/v1_6/rmnhome.go index 82d39074d6f..b05d15bc223 100644 --- a/deployment/ccip/view/v1_6/rmnhome.go +++ b/deployment/ccip/view/v1_6/rmnhome.go @@ -7,6 +7,7 @@ import ( "math/big" "github.com/ethereum/go-ethereum/accounts/abi/bind" + "github.com/smartcontractkit/chainlink/deployment/common/view/types" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/rmn_home" ) diff --git a/deployment/common/changeset/deploy_link_token.go b/deployment/common/changeset/deploy_link_token.go index c115a7ee083..0c648939c9f 100644 --- a/deployment/common/changeset/deploy_link_token.go +++ b/deployment/common/changeset/deploy_link_token.go @@ -1,7 +1,7 @@ package changeset import ( - "fmt" + "errors" "github.com/smartcontractkit/chainlink-common/pkg/logger" @@ -17,7 +17,7 @@ func DeployLinkToken(e deployment.Environment, chains []uint64) (deployment.Chan for _, chain := range chains { _, ok := e.Chains[chain] if !ok { - return deployment.ChangesetOutput{}, fmt.Errorf("chain not found in environment") + return deployment.ChangesetOutput{}, errors.New("chain not found in environment") } } newAddresses := deployment.NewMemoryAddressBook() diff --git a/deployment/common/changeset/example/add_mint_burners_link.go b/deployment/common/changeset/example/add_mint_burners_link.go index 7322f99dd60..d41734519d7 100644 --- a/deployment/common/changeset/example/add_mint_burners_link.go +++ b/deployment/common/changeset/example/add_mint_burners_link.go @@ -18,7 +18,6 @@ var _ deployment.ChangeSet[*AddMintersBurnersLinkConfig] = AddMintersBurnersLink // AddMintersBurnersLink grants the minter / burner role to the provided addresses. 
func AddMintersBurnersLink(e deployment.Environment, cfg *AddMintersBurnersLinkConfig) (deployment.ChangesetOutput, error) { - chain := e.Chains[cfg.ChainSelector] addresses, err := e.ExistingAddresses.AddressesForChain(cfg.ChainSelector) if err != nil { @@ -66,5 +65,4 @@ func AddMintersBurnersLink(e deployment.Environment, cfg *AddMintersBurnersLinkC } } return deployment.ChangesetOutput{}, nil - } diff --git a/deployment/common/changeset/example/link_transfer.go b/deployment/common/changeset/example/link_transfer.go index 2e3be48a4d1..6253be187c0 100644 --- a/deployment/common/changeset/example/link_transfer.go +++ b/deployment/common/changeset/example/link_transfer.go @@ -135,7 +135,6 @@ func initStatePerChain(cfg *LinkTransferConfig, e deployment.Environment) ( mcmsStatePerChain, err = changeset.MaybeLoadMCMSWithTimelockState(e, chainSelectors) if err != nil { return nil, nil, err - } return linkStatePerChain, mcmsStatePerChain, nil } @@ -160,12 +159,10 @@ func transferOrBuildTx( } } return tx, nil - } // LinkTransfer takes the given link transfers and executes them or creates an MCMS proposal for them. func LinkTransfer(e deployment.Environment, cfg *LinkTransferConfig) (deployment.ChangesetOutput, error) { - err := cfg.Validate(e) if err != nil { return deployment.ChangesetOutput{}, fmt.Errorf("invalid LinkTransferConfig: %w", err) diff --git a/deployment/common/changeset/example/link_transfer_test.go b/deployment/common/changeset/example/link_transfer_test.go index eecfbd37c95..4cc2bd0880b 100644 --- a/deployment/common/changeset/example/link_transfer_test.go +++ b/deployment/common/changeset/example/link_transfer_test.go @@ -25,7 +25,6 @@ import ( // setupLinkTransferContracts deploys all required contracts for the link transfer tests and returns the updated env. func setupLinkTransferTestEnv(t *testing.T) deployment.Environment { - lggr := logger.TestLogger(t) cfg := memory.MemoryEnvironmentConfig{ Nodes: 1, diff --git a/deployment/common/changeset/example/mint_link.go b/deployment/common/changeset/example/mint_link.go index dc50f8a1a27..8a71555928e 100644 --- a/deployment/common/changeset/example/mint_link.go +++ b/deployment/common/changeset/example/mint_link.go @@ -19,7 +19,6 @@ var _ deployment.ChangeSet[*MintLinkConfig] = MintLink // MintLink mints LINK to the provided contract. 
func MintLink(e deployment.Environment, cfg *MintLinkConfig) (deployment.ChangesetOutput, error) { - chain := e.Chains[cfg.ChainSelector] addresses, err := e.ExistingAddresses.AddressesForChain(cfg.ChainSelector) if err != nil { @@ -39,5 +38,4 @@ func MintLink(e deployment.Environment, cfg *MintLinkConfig) (deployment.Changes return deployment.ChangesetOutput{}, err } return deployment.ChangesetOutput{}, nil - } diff --git a/deployment/common/changeset/internal/mcms.go b/deployment/common/changeset/internal/mcms.go index baa82d77c8f..61808e1cbbd 100644 --- a/deployment/common/changeset/internal/mcms.go +++ b/deployment/common/changeset/internal/mcms.go @@ -27,7 +27,7 @@ func DeployMCMSWithConfig( chain.Client, ) return deployment.ContractDeploy[*owner_helpers.ManyChainMultiSig]{ - mcmAddr, mcm, tx, deployment.NewTypeAndVersion(contractType, deployment.Version1_0_0), err2, + Address: mcmAddr, Contract: mcm, Tx: tx, Tv: deployment.NewTypeAndVersion(contractType, deployment.Version1_0_0), Err: err2, } }) if err != nil { @@ -115,7 +115,7 @@ func DeployMCMSWithTimelockContracts( []common.Address{bypasser.Address}, // bypassers ) return deployment.ContractDeploy[*owner_helpers.RBACTimelock]{ - timelock, cc, tx2, deployment.NewTypeAndVersion(types.RBACTimelock, deployment.Version1_0_0), err2, + Address: timelock, Contract: cc, Tx: tx2, Tv: deployment.NewTypeAndVersion(types.RBACTimelock, deployment.Version1_0_0), Err: err2, } }) if err != nil { @@ -131,7 +131,7 @@ func DeployMCMSWithTimelockContracts( timelock.Address, ) return deployment.ContractDeploy[*owner_helpers.CallProxy]{ - callProxy, cc, tx2, deployment.NewTypeAndVersion(types.CallProxy, deployment.Version1_0_0), err2, + Address: callProxy, Contract: cc, Tx: tx2, Tv: deployment.NewTypeAndVersion(types.CallProxy, deployment.Version1_0_0), Err: err2, } }) if err != nil { diff --git a/deployment/common/changeset/save_existing.go b/deployment/common/changeset/save_existing.go index a5177c8e49b..57e53607cdc 100644 --- a/deployment/common/changeset/save_existing.go +++ b/deployment/common/changeset/save_existing.go @@ -30,13 +30,13 @@ func (cfg ExistingContractsConfig) Validate() error { return fmt.Errorf("invalid chain selector: %d - %w", ec.ChainSelector, err) } if ec.Address == (common.Address{}) { - return fmt.Errorf("address must be set") + return errors.New("address must be set") } if ec.TypeAndVersion.Type == "" { - return fmt.Errorf("type must be set") + return errors.New("type must be set") } if val, err := ec.TypeAndVersion.Version.Value(); err != nil || val == "" { - return fmt.Errorf("version must be set") + return errors.New("version must be set") } } return nil diff --git a/deployment/common/changeset/set_config_mcms.go b/deployment/common/changeset/set_config_mcms.go index 3ba5d2db4b6..5e2dc718b95 100644 --- a/deployment/common/changeset/set_config_mcms.go +++ b/deployment/common/changeset/set_config_mcms.go @@ -192,7 +192,6 @@ func SetConfigMCMS(e deployment.Environment, cfg MCMSConfig) (deployment.Changes batch := addTxsToProposalBatch(setConfigTxsChain, chainSelector, *state) batches = append(batches, batch) } - } if useMCMS { diff --git a/deployment/common/changeset/set_config_mcms_test.go b/deployment/common/changeset/set_config_mcms_test.go index 7220bdd755a..207b37c00f3 100644 --- a/deployment/common/changeset/set_config_mcms_test.go +++ b/deployment/common/changeset/set_config_mcms_test.go @@ -7,6 +7,7 @@ import ( "github.com/ethereum/go-ethereum/common" "github.com/smartcontractkit/ccip-owner-contracts/pkg/config" 
chain_selectors "github.com/smartcontractkit/chain-selectors" + "github.com/smartcontractkit/chainlink-common/pkg/utils/tests" commonchangeset "github.com/smartcontractkit/chainlink/deployment/common/changeset" @@ -24,7 +25,6 @@ import ( // setupSetConfigTestEnv deploys all required contracts for the setConfig MCMS contract call. func setupSetConfigTestEnv(t *testing.T) deployment.Environment { - lggr := logger.TestLogger(t) cfg := memory.MemoryEnvironmentConfig{ Nodes: 1, @@ -53,7 +53,6 @@ func setupSetConfigTestEnv(t *testing.T) deployment.Environment { // TestSetConfigMCMSVariants tests the SetConfigMCMS changeset variants. func TestSetConfigMCMSVariants(t *testing.T) { - // Add the timelock as a signer to check state changes for _, tc := range []struct { name string @@ -62,7 +61,6 @@ func TestSetConfigMCMSVariants(t *testing.T) { { name: "MCMS disabled", changeSets: func(mcmsState *commonchangeset.MCMSWithTimelockState, chainSel uint64, cfgProp, cfgCancel, cfgBypass config.Config) []commonchangeset.ChangesetApplication { - return []commonchangeset.ChangesetApplication{ { Changeset: commonchangeset.WrapChangeSet(commonchangeset.SetConfigMCMS), diff --git a/deployment/common/changeset/test_helpers.go b/deployment/common/changeset/test_helpers.go index e92b36e5b55..5d524e542ad 100644 --- a/deployment/common/changeset/test_helpers.go +++ b/deployment/common/changeset/test_helpers.go @@ -5,6 +5,7 @@ import ( "testing" mapset "github.com/deckarep/golang-set/v2" + jobv1 "github.com/smartcontractkit/chainlink-protos/job-distributor/v1/job" "github.com/smartcontractkit/chainlink-testing-framework/lib/utils/testcontext" diff --git a/deployment/common/changeset/transfer_to_mcms_with_timelock.go b/deployment/common/changeset/transfer_to_mcms_with_timelock.go index 45efccefd2e..ab0883cc9a7 100644 --- a/deployment/common/changeset/transfer_to_mcms_with_timelock.go +++ b/deployment/common/changeset/transfer_to_mcms_with_timelock.go @@ -36,11 +36,11 @@ func LoadOwnableContract(addr common.Address, client bind.ContractBackend) (comm // Just using the ownership interface from here. c, err := burn_mint_erc677.NewBurnMintERC677(addr, client) if err != nil { - return common.Address{}, nil, fmt.Errorf("failed to create contract: %v", err) + return common.Address{}, nil, fmt.Errorf("failed to create contract: %w", err) } owner, err := c.Owner(nil) if err != nil { - return common.Address{}, nil, fmt.Errorf("failed to get owner of contract: %v", err) + return common.Address{}, nil, fmt.Errorf("failed to get owner of contract: %w", err) } return owner, c, nil } @@ -52,13 +52,13 @@ func (t TransferToMCMSWithTimelockConfig) Validate(e deployment.Environment) err // Note this also assures non-zero addresses. 
if exists, err := deployment.AddressBookContains(e.ExistingAddresses, chainSelector, contract.String()); err != nil || !exists { if err != nil { - return fmt.Errorf("failed to check address book: %v", err) + return fmt.Errorf("failed to check address book: %w", err) } return fmt.Errorf("contract %s not found in address book", contract) } owner, _, err := LoadOwnableContract(contract, e.Chains[chainSelector].Client) if err != nil { - return fmt.Errorf("failed to load ownable: %v", err) + return fmt.Errorf("failed to load ownable: %w", err) } if owner != e.Chains[chainSelector].DeployerKey.From { return fmt.Errorf("contract %s is not owned by the deployer key", contract) @@ -66,10 +66,10 @@ func (t TransferToMCMSWithTimelockConfig) Validate(e deployment.Environment) err } // If there is no timelock and mcms proposer on the chain, the transfer will fail. if _, err := deployment.SearchAddressBook(e.ExistingAddresses, chainSelector, types.RBACTimelock); err != nil { - return fmt.Errorf("timelock not present on the chain %v", err) + return fmt.Errorf("timelock not present on the chain %w", err) } if _, err := deployment.SearchAddressBook(e.ExistingAddresses, chainSelector, types.ProposerManyChainMultisig); err != nil { - return fmt.Errorf("mcms proposer not present on the chain %v", err) + return fmt.Errorf("mcms proposer not present on the chain %w", err) } } @@ -101,7 +101,7 @@ func TransferToMCMSWithTimelock( timelocksByChain[chainSelector] = common.HexToAddress(timelockAddr) proposer, err := owner_helpers.NewManyChainMultiSig(common.HexToAddress(proposerAddr), e.Chains[chainSelector].Client) if err != nil { - return deployment.ChangesetOutput{}, fmt.Errorf("failed to create proposer mcms: %v", err) + return deployment.ChangesetOutput{}, fmt.Errorf("failed to create proposer mcms: %w", err) } proposersByChain[chainSelector] = proposer @@ -118,7 +118,7 @@ func TransferToMCMSWithTimelock( tx, err := c.TransferOwnership(e.Chains[chainSelector].DeployerKey, common.HexToAddress(timelockAddr)) _, err = deployment.ConfirmIfNoError(e.Chains[chainSelector], tx, err) if err != nil { - return deployment.ChangesetOutput{}, fmt.Errorf("failed to transfer ownership of contract %T: %v", contract, err) + return deployment.ChangesetOutput{}, fmt.Errorf("failed to transfer ownership of contract %T: %w", contract, err) } tx, err = c.AcceptOwnership(deployment.SimTransactOpts()) if err != nil { diff --git a/deployment/common/proposalutils/mcms_helpers.go b/deployment/common/proposalutils/mcms_helpers.go index 51a720a4389..8b7153e526b 100644 --- a/deployment/common/proposalutils/mcms_helpers.go +++ b/deployment/common/proposalutils/mcms_helpers.go @@ -10,6 +10,7 @@ import ( "github.com/ethereum/go-ethereum/common" owner_helpers "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/mcms" + "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink/deployment" "github.com/smartcontractkit/chainlink/deployment/common/types" @@ -76,21 +77,21 @@ type RunTimelockExecutorConfig struct { func (cfg RunTimelockExecutorConfig) Validate() error { if cfg.Executor == nil { - return fmt.Errorf("executor is nil") + return errors.New("executor is nil") } if cfg.TimelockContracts == nil { - return fmt.Errorf("timelock contracts is nil") + return errors.New("timelock contracts is nil") } if cfg.ChainSelector == 0 { - return fmt.Errorf("chain selector is 0") + return errors.New("chain selector is 0") } if cfg.BlockStart 
!= nil && cfg.BlockEnd == nil { if *cfg.BlockStart > *cfg.BlockEnd { - return fmt.Errorf("block start is greater than block end") + return errors.New("block start is greater than block end") } } if cfg.BlockStart == nil && cfg.BlockEnd != nil { - return fmt.Errorf("block start must not be nil when block end is not nil") + return errors.New("block start must not be nil when block end is not nil") } if len(cfg.Executor.Operations[mcms.ChainIdentifier(cfg.ChainSelector)]) == 0 { diff --git a/deployment/common/proposalutils/propose.go b/deployment/common/proposalutils/propose.go index baf506cb2f8..874bbecbdb8 100644 --- a/deployment/common/proposalutils/propose.go +++ b/deployment/common/proposalutils/propose.go @@ -1,6 +1,7 @@ package proposalutils import ( + "errors" "fmt" "time" @@ -15,7 +16,6 @@ const ( DefaultValidUntil = 72 * time.Hour ) - func BuildProposalMetadata( chainSelectors []uint64, proposerMcmsesPerChain map[uint64]*gethwrappers.ManyChainMultiSig, @@ -55,7 +55,7 @@ func BuildProposalFromBatches( minDelay time.Duration, ) (*timelock.MCMSWithTimelockProposal, error) { if len(batches) == 0 { - return nil, fmt.Errorf("no operations in batch") + return nil, errors.New("no operations in batch") } chains := mapset.NewSet[uint64]() diff --git a/deployment/common/types/types.go b/deployment/common/types/types.go index 0f04421af43..c86fb3e9887 100644 --- a/deployment/common/types/types.go +++ b/deployment/common/types/types.go @@ -1,7 +1,7 @@ package types import ( - "fmt" + "errors" "math/big" "time" @@ -52,40 +52,40 @@ type OCRParameters struct { func (params OCRParameters) Validate() error { if params.DeltaProgress <= 0 { - return fmt.Errorf("deltaProgress must be positive") + return errors.New("deltaProgress must be positive") } if params.DeltaResend <= 0 { - return fmt.Errorf("deltaResend must be positive") + return errors.New("deltaResend must be positive") } if params.DeltaInitial <= 0 { - return fmt.Errorf("deltaInitial must be positive") + return errors.New("deltaInitial must be positive") } if params.DeltaRound <= 0 { - return fmt.Errorf("deltaRound must be positive") + return errors.New("deltaRound must be positive") } if params.DeltaGrace <= 0 { - return fmt.Errorf("deltaGrace must be positive") + return errors.New("deltaGrace must be positive") } if params.DeltaCertifiedCommitRequest <= 0 { - return fmt.Errorf("deltaCertifiedCommitRequest must be positive") + return errors.New("deltaCertifiedCommitRequest must be positive") } if params.DeltaStage <= 0 { - return fmt.Errorf("deltaStage must be positive") + return errors.New("deltaStage must be positive") } if params.Rmax <= 0 { - return fmt.Errorf("rmax must be positive") + return errors.New("rmax must be positive") } if params.MaxDurationQuery <= 0 { - return fmt.Errorf("maxDurationQuery must be positive") + return errors.New("maxDurationQuery must be positive") } if params.MaxDurationObservation <= 0 { - return fmt.Errorf("maxDurationObservation must be positive") + return errors.New("maxDurationObservation must be positive") } if params.MaxDurationShouldAcceptAttestedReport <= 0 { - return fmt.Errorf("maxDurationShouldAcceptAttestedReport must be positive") + return errors.New("maxDurationShouldAcceptAttestedReport must be positive") } if params.MaxDurationShouldTransmitAcceptedReport <= 0 { - return fmt.Errorf("maxDurationShouldTransmitAcceptedReport must be positive") + return errors.New("maxDurationShouldTransmitAcceptedReport must be positive") } return nil } diff --git a/deployment/common/view/nops.go 
b/deployment/common/view/nops.go index 61e16d59145..74f011dfe44 100644 --- a/deployment/common/view/nops.go +++ b/deployment/common/view/nops.go @@ -2,9 +2,11 @@ package view import ( "context" + "encoding/hex" "fmt" "github.com/pkg/errors" + nodev1 "github.com/smartcontractkit/chainlink-protos/job-distributor/v1/node" "github.com/smartcontractkit/chainlink/deployment" @@ -62,11 +64,11 @@ func GenerateNopsView(nodeIds []string, oc deployment.OffchainClient) (map[strin } for details, ocrConfig := range node.SelToOCRConfig { nop.OCRKeys[details.ChainName] = OCRKeyView{ - OffchainPublicKey: fmt.Sprintf("%x", ocrConfig.OffchainPublicKey[:]), + OffchainPublicKey: hex.EncodeToString(ocrConfig.OffchainPublicKey[:]), OnchainPublicKey: fmt.Sprintf("%x", ocrConfig.OnchainPublicKey[:]), PeerID: ocrConfig.PeerID.String(), TransmitAccount: string(ocrConfig.TransmitAccount), - ConfigEncryptionPublicKey: fmt.Sprintf("%x", ocrConfig.ConfigEncryptionPublicKey[:]), + ConfigEncryptionPublicKey: hex.EncodeToString(ocrConfig.ConfigEncryptionPublicKey[:]), KeyBundleID: ocrConfig.KeyBundleID, } } diff --git a/deployment/common/view/v1_0/capreg_test.go b/deployment/common/view/v1_0/capreg_test.go index 15fe23be00e..eb7c8a83bd4 100644 --- a/deployment/common/view/v1_0/capreg_test.go +++ b/deployment/common/view/v1_0/capreg_test.go @@ -149,7 +149,7 @@ func TestCapRegView_Denormalize(t *testing.T) { {Name: "third nop"}, }, Capabilities: []CapabilityView{ - //capabilities for don1 + // capabilities for don1 NewCapabilityView(cr.CapabilitiesRegistryCapabilityInfo{ HashedId: [32]byte{0: 1}, LabelledName: "cap1", @@ -161,7 +161,7 @@ func TestCapRegView_Denormalize(t *testing.T) { Version: "1.0.0", }), - //capabilities for don2 + // capabilities for don2 NewCapabilityView(cr.CapabilitiesRegistryCapabilityInfo{ HashedId: [32]byte{2: 2}, // matches don ID 2, capabitility ID 1 LabelledName: "other cap", diff --git a/deployment/common/view/v1_0/link_token.go b/deployment/common/view/v1_0/link_token.go index 6dd1a00be3b..2d345f1fd3c 100644 --- a/deployment/common/view/v1_0/link_token.go +++ b/deployment/common/view/v1_0/link_token.go @@ -44,8 +44,8 @@ func GenerateLinkTokenView(lt *link_token.LinkToken) (LinkTokenView, error) { return LinkTokenView{ ContractMetaData: types.ContractMetaData{ TypeAndVersion: deployment.TypeAndVersion{ - commontypes.LinkToken, - deployment.Version1_0_0, + Type: commontypes.LinkToken, + Version: deployment.Version1_0_0, }.String(), Address: lt.Address(), Owner: owner, diff --git a/deployment/common/view/v1_0/link_token_test.go b/deployment/common/view/v1_0/link_token_test.go index c83c0b3e3c2..735d7789169 100644 --- a/deployment/common/view/v1_0/link_token_test.go +++ b/deployment/common/view/v1_0/link_token_test.go @@ -26,12 +26,12 @@ func TestLinkTokenView(t *testing.T) { require.NoError(t, err) assert.Equal(t, v.Owner, chain.DeployerKey.From) - assert.Equal(t, v.TypeAndVersion, "LinkToken 1.0.0") - assert.Equal(t, v.Decimals, uint8(18)) + assert.Equal(t, "LinkToken 1.0.0", v.TypeAndVersion) + assert.Equal(t, uint8(18), v.Decimals) // Initially nothing minted and no minters/burners. 
- assert.Equal(t, v.Supply.String(), "0") - require.Len(t, v.Minters, 0) - require.Len(t, v.Burners, 0) + assert.Equal(t, "0", v.Supply.String()) + require.Empty(t, v.Minters) + require.Empty(t, v.Burners) // Add some minters tx, err = lt.GrantMintAndBurnRoles(chain.DeployerKey, chain.DeployerKey.From) @@ -45,7 +45,7 @@ func TestLinkTokenView(t *testing.T) { v, err = GenerateLinkTokenView(lt) require.NoError(t, err) - assert.Equal(t, v.Supply.String(), "100") + assert.Equal(t, "100", v.Supply.String()) require.Len(t, v.Minters, 1) require.Equal(t, v.Minters[0].String(), chain.DeployerKey.From.String()) require.Len(t, v.Burners, 1) diff --git a/deployment/common/view/v1_0/static_link_token.go b/deployment/common/view/v1_0/static_link_token.go index 525f1a9f0c5..2c9c60531b2 100644 --- a/deployment/common/view/v1_0/static_link_token.go +++ b/deployment/common/view/v1_0/static_link_token.go @@ -28,8 +28,8 @@ func GenerateStaticLinkTokenView(lt *link_token_interface.LinkToken) (StaticLink return StaticLinkTokenView{ ContractMetaData: types.ContractMetaData{ TypeAndVersion: deployment.TypeAndVersion{ - commontypes.StaticLinkToken, - deployment.Version1_0_0, + Type: commontypes.StaticLinkToken, + Version: deployment.Version1_0_0, }.String(), Address: lt.Address(), // No owner. diff --git a/deployment/common/view/v1_0/static_link_token_test.go b/deployment/common/view/v1_0/static_link_token_test.go index 517efac9438..b276a80fb2e 100644 --- a/deployment/common/view/v1_0/static_link_token_test.go +++ b/deployment/common/view/v1_0/static_link_token_test.go @@ -26,7 +26,7 @@ func TestStaticLinkTokenView(t *testing.T) { require.NoError(t, err) assert.Equal(t, v.Owner, common.HexToAddress("0x0")) // Ownerless - assert.Equal(t, v.TypeAndVersion, "StaticLinkToken 1.0.0") - assert.Equal(t, v.Decimals, uint8(18)) - assert.Equal(t, v.Supply.String(), "1000000000000000000000000000") + assert.Equal(t, "StaticLinkToken 1.0.0", v.TypeAndVersion) + assert.Equal(t, uint8(18), v.Decimals) + assert.Equal(t, "1000000000000000000000000000", v.Supply.String()) } diff --git a/deployment/environment.go b/deployment/environment.go index bfbeac2f0c4..6fc28fac764 100644 --- a/deployment/environment.go +++ b/deployment/environment.go @@ -7,6 +7,7 @@ import ( "fmt" "math/big" "sort" + "strconv" "strings" "github.com/ethereum/go-ethereum/accounts/abi/bind" @@ -75,7 +76,7 @@ func (c Chain) Name() string { panic(err) } if chainInfo.ChainName == "" { - return fmt.Sprintf("%d", c.Selector) + return strconv.FormatUint(c.Selector, 10) } return chainInfo.ChainName } diff --git a/deployment/environment/crib/types.go b/deployment/environment/crib/types.go index d19c8424443..99baf8e8774 100644 --- a/deployment/environment/crib/types.go +++ b/deployment/environment/crib/types.go @@ -2,6 +2,7 @@ package crib import ( "context" + "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink/deployment" "github.com/smartcontractkit/chainlink/deployment/environment/devenv" diff --git a/deployment/environment/devenv/chain.go b/deployment/environment/devenv/chain.go index 5c6c4336ed7..265a6647050 100644 --- a/deployment/environment/devenv/chain.go +++ b/deployment/environment/devenv/chain.go @@ -2,6 +2,7 @@ package devenv import ( "context" + "errors" "fmt" "math/big" "time" @@ -39,7 +40,7 @@ func (c *ChainConfig) SetUsers(pvtkeys []string) error { c.Users = []*bind.TransactOpts{c.DeployerKey} return nil } else { - return fmt.Errorf("no private keys provided for users, deployer key is also not set") + return 
errors.New("no private keys provided for users, deployer key is also not set") } } for _, pvtKeyStr := range pvtkeys { diff --git a/deployment/environment/devenv/don.go b/deployment/environment/devenv/don.go index 76f6ee92b68..a132fe72a2f 100644 --- a/deployment/environment/devenv/don.go +++ b/deployment/environment/devenv/don.go @@ -4,14 +4,16 @@ import ( "context" "errors" "fmt" - chainsel "github.com/smartcontractkit/chain-selectors" "strconv" "strings" "time" + chainsel "github.com/smartcontractkit/chain-selectors" + "github.com/hashicorp/go-multierror" "github.com/rs/zerolog" "github.com/sethvargo/go-retry" + nodev1 "github.com/smartcontractkit/chainlink-protos/job-distributor/v1/node" clclient "github.com/smartcontractkit/chainlink/deployment/environment/nodeclient" "github.com/smartcontractkit/chainlink/deployment/environment/web/sdk/client" @@ -395,7 +397,7 @@ func (n *Node) CreateJobDistributor(ctx context.Context, jd JobDistributor) (str // create the job distributor in the node with the csa key resp, err := n.gqlClient.ListJobDistributors(ctx) if err != nil { - return "", fmt.Errorf("could not list job distrubutors: %w", err) + return "", fmt.Errorf("could not list job distributors: %w", err) } if len(resp.FeedsManagers.Results) > 0 { for _, fm := range resp.FeedsManagers.Results { diff --git a/deployment/environment/devenv/don_test.go b/deployment/environment/devenv/don_test.go index f93436f72f5..0e0578f8275 100644 --- a/deployment/environment/devenv/don_test.go +++ b/deployment/environment/devenv/don_test.go @@ -7,7 +7,6 @@ import ( ) func TestPtrVal(t *testing.T) { - x := "hello" xptr := ptr(x) got := value(xptr) diff --git a/deployment/environment/devenv/environment.go b/deployment/environment/devenv/environment.go index 121caea43bb..2fffe6adf2b 100644 --- a/deployment/environment/devenv/environment.go +++ b/deployment/environment/devenv/environment.go @@ -2,6 +2,7 @@ package devenv import ( "context" + "errors" "fmt" "github.com/smartcontractkit/chainlink-common/pkg/logger" @@ -30,10 +31,10 @@ func NewEnvironment(ctx func() context.Context, lggr logger.Logger, config Envir jd, ok := offChain.(*JobDistributor) if !ok { - return nil, nil, fmt.Errorf("offchain client does not implement JobDistributor") + return nil, nil, errors.New("offchain client does not implement JobDistributor") } if jd == nil { - return nil, nil, fmt.Errorf("offchain client is not set up") + return nil, nil, errors.New("offchain client is not set up") } var nodeIDs []string if jd.don != nil { diff --git a/deployment/environment/devenv/jd.go b/deployment/environment/devenv/jd.go index 818f9b09400..844068e50da 100644 --- a/deployment/environment/devenv/jd.go +++ b/deployment/environment/devenv/jd.go @@ -2,6 +2,7 @@ package devenv import ( "context" + "errors" "fmt" "golang.org/x/oauth2" @@ -121,7 +122,7 @@ func (jd JobDistributor) GetCSAPublicKey(ctx context.Context) (string, error) { return "", err } if keypairs == nil || len(keypairs.Keypairs) == 0 { - return "", fmt.Errorf("no keypairs found") + return "", errors.New("no keypairs found") } csakey := keypairs.Keypairs[0].PublicKey return csakey, nil @@ -138,7 +139,7 @@ func (jd JobDistributor) ProposeJob(ctx context.Context, in *jobv1.ProposeJobReq return nil, fmt.Errorf("failed to propose job. err: %w", err) } if res.Proposal == nil { - return nil, fmt.Errorf("failed to propose job. err: proposal is nil") + return nil, errors.New("failed to propose job. 
err: proposal is nil") } if jd.don == nil || len(jd.don.Nodes) == 0 { return res, nil diff --git a/deployment/environment/memory/job_client.go b/deployment/environment/memory/job_client.go index a3cfee41608..e44c664b77e 100644 --- a/deployment/environment/memory/job_client.go +++ b/deployment/environment/memory/job_client.go @@ -29,42 +29,42 @@ type JobClient struct { } func (j JobClient) BatchProposeJob(ctx context.Context, in *jobv1.BatchProposeJobRequest, opts ...grpc.CallOption) (*jobv1.BatchProposeJobResponse, error) { - //TODO CCIP-3108 implement me + // TODO CCIP-3108 implement me panic("implement me") } func (j JobClient) UpdateJob(ctx context.Context, in *jobv1.UpdateJobRequest, opts ...grpc.CallOption) (*jobv1.UpdateJobResponse, error) { - //TODO CCIP-3108 implement me + // TODO CCIP-3108 implement me panic("implement me") } func (j JobClient) DisableNode(ctx context.Context, in *nodev1.DisableNodeRequest, opts ...grpc.CallOption) (*nodev1.DisableNodeResponse, error) { - //TODO CCIP-3108 implement me + // TODO CCIP-3108 implement me panic("implement me") } func (j JobClient) EnableNode(ctx context.Context, in *nodev1.EnableNodeRequest, opts ...grpc.CallOption) (*nodev1.EnableNodeResponse, error) { - //TODO CCIP-3108 implement me + // TODO CCIP-3108 implement me panic("implement me") } func (j JobClient) RegisterNode(ctx context.Context, in *nodev1.RegisterNodeRequest, opts ...grpc.CallOption) (*nodev1.RegisterNodeResponse, error) { - //TODO implement me + // TODO implement me panic("implement me") } func (j JobClient) UpdateNode(ctx context.Context, in *nodev1.UpdateNodeRequest, opts ...grpc.CallOption) (*nodev1.UpdateNodeResponse, error) { - //TODO CCIP-3108 implement me + // TODO CCIP-3108 implement me panic("implement me") } func (j JobClient) GetKeypair(ctx context.Context, in *csav1.GetKeypairRequest, opts ...grpc.CallOption) (*csav1.GetKeypairResponse, error) { - //TODO implement me + // TODO implement me panic("implement me") } func (j JobClient) ListKeypairs(ctx context.Context, in *csav1.ListKeypairsRequest, opts ...grpc.CallOption) (*csav1.ListKeypairsResponse, error) { - //TODO CCIP-3108 implement me + // TODO CCIP-3108 implement me panic("implement me") } @@ -84,7 +84,7 @@ func (j JobClient) GetNode(ctx context.Context, in *nodev1.GetNodeRequest, opts } func (j JobClient) ListNodes(ctx context.Context, in *nodev1.ListNodesRequest, opts ...grpc.CallOption) (*nodev1.ListNodesResponse, error) { - //TODO CCIP-3108 + // TODO CCIP-3108 include := func(node *nodev1.Node) bool { if in.Filter == nil { return true @@ -273,22 +273,22 @@ func (j JobClient) ListNodeChainConfigs(ctx context.Context, in *nodev1.ListNode } func (j JobClient) GetJob(ctx context.Context, in *jobv1.GetJobRequest, opts ...grpc.CallOption) (*jobv1.GetJobResponse, error) { - //TODO CCIP-3108 implement me + // TODO CCIP-3108 implement me panic("implement me") } func (j JobClient) GetProposal(ctx context.Context, in *jobv1.GetProposalRequest, opts ...grpc.CallOption) (*jobv1.GetProposalResponse, error) { - //TODO CCIP-3108 implement me + // TODO CCIP-3108 implement me panic("implement me") } func (j JobClient) ListJobs(ctx context.Context, in *jobv1.ListJobsRequest, opts ...grpc.CallOption) (*jobv1.ListJobsResponse, error) { - //TODO CCIP-3108 implement me + // TODO CCIP-3108 implement me panic("implement me") } func (j JobClient) ListProposals(ctx context.Context, in *jobv1.ListProposalsRequest, opts ...grpc.CallOption) (*jobv1.ListProposalsResponse, error) { - //TODO CCIP-3108 implement me + // TODO 
CCIP-3108 implement me panic("implement me") } @@ -338,12 +338,12 @@ func (j JobClient) ProposeJob(ctx context.Context, in *jobv1.ProposeJobRequest, } func (j JobClient) RevokeJob(ctx context.Context, in *jobv1.RevokeJobRequest, opts ...grpc.CallOption) (*jobv1.RevokeJobResponse, error) { - //TODO CCIP-3108 implement me + // TODO CCIP-3108 implement me panic("implement me") } func (j JobClient) DeleteJob(ctx context.Context, in *jobv1.DeleteJobRequest, opts ...grpc.CallOption) (*jobv1.DeleteJobResponse, error) { - //TODO CCIP-3108 implement me + // TODO CCIP-3108 implement me panic("implement me") } diff --git a/deployment/environment/nodeclient/chainlink.go b/deployment/environment/nodeclient/chainlink.go index 9b92dd12759..9aed808d5be 100644 --- a/deployment/environment/nodeclient/chainlink.go +++ b/deployment/environment/nodeclient/chainlink.go @@ -7,6 +7,7 @@ import ( "math/big" "net/http" "os" + "strconv" "strings" "sync" "time" @@ -492,7 +493,7 @@ func (c *ChainlinkClient) DeleteP2PKey(id int) (*http.Response, error) { c.l.Info().Str(NodeURL, c.Config.URL).Int("ID", id).Msg("Deleting P2P Key") resp, err := c.APIClient.R(). SetPathParams(map[string]string{ - "id": fmt.Sprint(id), + "id": strconv.Itoa(id), }). Delete("/v2/keys/p2p/{id}") if err != nil { @@ -528,7 +529,7 @@ func (c *ChainlinkClient) UpdateEthKeyMaxGasPriceGWei(keyId string, gWei int) (* "keyId": keyId, }). SetQueryParams(map[string]string{ - "maxGasPriceGWei": fmt.Sprint(gWei), + "maxGasPriceGWei": strconv.Itoa(gWei), }). SetResult(ethKey). Put("/v2/keys/eth/{keyId}") @@ -1031,7 +1032,7 @@ func (c *ChainlinkClient) Profile(profileTime time.Duration, profileFunction fun "reportType": profileReport.Type, }). SetQueryParams(map[string]string{ - "seconds": fmt.Sprint(profileSeconds), + "seconds": strconv.Itoa(profileSeconds), }). Get("/v2/debug/pprof/{reportType}") if err != nil { @@ -1222,10 +1223,10 @@ func (c *ChainlinkClient) ReplayLogPollerFromBlock(fromBlock, evmChainID int64) resp, err := c.APIClient.R(). SetResult(&specObj). SetQueryParams(map[string]string{ - "evmChainID": fmt.Sprint(evmChainID), + "evmChainID": strconv.FormatInt(evmChainID, 10), }). SetPathParams(map[string]string{ - "fromBlock": fmt.Sprint(fromBlock), + "fromBlock": strconv.FormatInt(fromBlock, 10), }). 
Post("/v2/replay_from_block/{fromBlock}") if err != nil { diff --git a/deployment/environment/web/sdk/client/client.go b/deployment/environment/web/sdk/client/client.go index e0a56b9e642..331376b2e9f 100644 --- a/deployment/environment/web/sdk/client/client.go +++ b/deployment/environment/web/sdk/client/client.go @@ -3,13 +3,15 @@ package client import ( "context" "encoding/json" + "errors" "fmt" - "github.com/Khan/genqlient/graphql" - "github.com/sethvargo/go-retry" "net/http" "strings" "time" + "github.com/Khan/genqlient/graphql" + "github.com/sethvargo/go-retry" + "github.com/smartcontractkit/chainlink/deployment/environment/web/sdk/client/doer" "github.com/smartcontractkit/chainlink/deployment/environment/web/sdk/internal/generated" ) @@ -61,7 +63,7 @@ func New(baseURI string, creds Credentials) (Client, error) { endpoints: ep, credentials: creds, } - + err := retry.Do(context.Background(), retry.WithMaxDuration(10*time.Second, retry.NewFibonacci(2*time.Second)), func(ctx context.Context) error { err := c.login() if err != nil { @@ -87,7 +89,7 @@ func (c *client) FetchCSAPublicKey(ctx context.Context) (*string, error) { return nil, err } if keys == nil || len(keys.CsaKeys.GetResults()) == 0 { - return nil, fmt.Errorf("no CSA keys found") + return nil, errors.New("no CSA keys found") } return &keys.CsaKeys.GetResults()[0].PublicKey, nil } @@ -98,7 +100,7 @@ func (c *client) FetchP2PPeerID(ctx context.Context) (*string, error) { return nil, err } if keys == nil || len(keys.P2pKeys.GetResults()) == 0 { - return nil, fmt.Errorf("no P2P keys found") + return nil, errors.New("no P2P keys found") } return &keys.P2pKeys.GetResults()[0].PeerID, nil } @@ -109,7 +111,7 @@ func (c *client) FetchOCR2KeyBundleID(ctx context.Context, chainType string) (st return "", err } if keyBundles == nil || len(keyBundles.GetOcr2KeyBundles().Results) == 0 { - return "", fmt.Errorf("no ocr2 keybundle found, check if ocr2 is enabled") + return "", errors.New("no ocr2 keybundle found, check if ocr2 is enabled") } for _, keyBundle := range keyBundles.GetOcr2KeyBundles().Results { if keyBundle.ChainType == generated.OCR2ChainType(chainType) { @@ -125,7 +127,7 @@ func (c *client) FetchAccountAddress(ctx context.Context, chainID string) (*stri return nil, err } if keys == nil || len(keys.EthKeys.GetResults()) == 0 { - return nil, fmt.Errorf("no accounts found") + return nil, errors.New("no accounts found") } for _, keyDetail := range keys.EthKeys.GetResults() { if keyDetail.GetChain().Enabled && keyDetail.GetChain().Id == chainID { @@ -141,7 +143,7 @@ func (c *client) FetchKeys(ctx context.Context, chainType string) ([]string, err return nil, err } if keys == nil { - return nil, fmt.Errorf("no accounts found") + return nil, errors.New("no accounts found") } switch generated.OCR2ChainType(chainType) { case generated.OCR2ChainTypeAptos: @@ -183,12 +185,12 @@ func (c *client) GetJobDistributor(ctx context.Context, id string) (generated.Fe return generated.FeedsManagerParts{}, err } if res == nil { - return generated.FeedsManagerParts{}, fmt.Errorf("no feeds manager found") + return generated.FeedsManagerParts{}, errors.New("no feeds manager found") } if success, ok := res.GetFeedsManager().(*generated.GetFeedsManagerFeedsManager); ok { return success.FeedsManagerParts, nil } - return generated.FeedsManagerParts{}, fmt.Errorf("failed to get feeds manager") + return generated.FeedsManagerParts{}, errors.New("failed to get feeds manager") } func (c *client) ListJobDistributors(ctx context.Context) 
(*generated.ListFeedsManagersResponse, error) { @@ -238,12 +240,12 @@ func (c *client) CreateJobDistributorChainConfig(ctx context.Context, in JobDist return "", err } if res == nil { - return "", fmt.Errorf("failed to create feeds manager chain config") + return "", errors.New("failed to create feeds manager chain config") } if success, ok := res.GetCreateFeedsManagerChainConfig().(*generated.CreateFeedsManagerChainConfigCreateFeedsManagerChainConfigCreateFeedsManagerChainConfigSuccess); ok { return success.ChainConfig.Id, nil } - return "", fmt.Errorf("failed to create feeds manager chain config") + return "", errors.New("failed to create feeds manager chain config") } func (c *client) DeleteJobDistributorChainConfig(ctx context.Context, id string) error { @@ -252,12 +254,12 @@ func (c *client) DeleteJobDistributorChainConfig(ctx context.Context, id string) return err } if res == nil { - return fmt.Errorf("failed to delete feeds manager chain config") + return errors.New("failed to delete feeds manager chain config") } if _, ok := res.GetDeleteFeedsManagerChainConfig().(*generated.DeleteFeedsManagerChainConfigDeleteFeedsManagerChainConfigDeleteFeedsManagerChainConfigSuccess); ok { return nil } - return fmt.Errorf("failed to delete feeds manager chain config") + return errors.New("failed to delete feeds manager chain config") } func (c *client) GetJobProposal(ctx context.Context, id string) (*generated.GetJobProposalJobProposal, error) { @@ -266,12 +268,12 @@ func (c *client) GetJobProposal(ctx context.Context, id string) (*generated.GetJ return nil, err } if proposal == nil { - return nil, fmt.Errorf("no job proposal found") + return nil, errors.New("no job proposal found") } if success, ok := proposal.GetJobProposal().(*generated.GetJobProposalJobProposal); ok { return success, nil } - return nil, fmt.Errorf("failed to get job proposal") + return nil, errors.New("failed to get job proposal") } func (c *client) ApproveJobProposalSpec(ctx context.Context, id string, force bool) (*JobProposalApprovalSuccessSpec, error) { @@ -289,7 +291,7 @@ func (c *client) ApproveJobProposalSpec(ctx context.Context, id string, force bo return &cmd, nil } } - return nil, fmt.Errorf("failed to approve job proposal spec") + return nil, errors.New("failed to approve job proposal spec") } func (c *client) CancelJobProposalSpec(ctx context.Context, id string) (*generated.CancelJobProposalSpecResponse, error) { @@ -327,7 +329,7 @@ func (c *client) login() error { cookieHeader := res.Header.Get("Set-Cookie") if cookieHeader == "" { - return fmt.Errorf("no cookie found in header") + return errors.New("no cookie found in header") } c.cookie = strings.Split(cookieHeader, ";")[0] diff --git a/deployment/environment/web/sdk/client/types.go b/deployment/environment/web/sdk/client/types.go index d213ee161c6..9ecc2cc72ea 100644 --- a/deployment/environment/web/sdk/client/types.go +++ b/deployment/environment/web/sdk/client/types.go @@ -3,7 +3,7 @@ package client import ( "bytes" "encoding/json" - "fmt" + "errors" "reflect" ) @@ -47,7 +47,7 @@ type JobProposalApprovalSuccessSpec struct { func DecodeInput(in, out any) error { if reflect.TypeOf(out).Kind() != reflect.Ptr || reflect.ValueOf(out).IsNil() { - return fmt.Errorf("out type must be a non-nil pointer") + return errors.New("out type must be a non-nil pointer") } jsonBytes, err := json.Marshal(in) if err != nil { diff --git a/deployment/evm_kmsclient.go b/deployment/evm_kmsclient.go index b28a3842930..811125827af 100644 --- a/deployment/evm_kmsclient.go +++ 
b/deployment/evm_kmsclient.go @@ -11,6 +11,7 @@ import ( "os" "github.com/aws/aws-sdk-go/aws/session" + "github.com/pkg/errors" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/kms" @@ -59,10 +60,10 @@ type KMS struct { func NewKMSClient(config KMS) (KMSClient, error) { if config.KmsDeployerKeyId == "" { - return nil, fmt.Errorf("KMS key ID is required") + return nil, errors.New("KMS key ID is required") } if config.KmsDeployerKeyRegion == "" { - return nil, fmt.Errorf("KMS key region is required") + return nil, errors.New("KMS key region is required") } var awsSessionFn AwsSessionFn if config.AwsProfileName != "" { @@ -94,7 +95,7 @@ func (c *EVMKMSClient) GetKMSTransactOpts(ctx context.Context, chainID *big.Int) pubKeyBytes := secp256k1.S256().Marshal(ecdsaPublicKey.X, ecdsaPublicKey.Y) keyAddr := crypto.PubkeyToAddress(*ecdsaPublicKey) if chainID == nil { - return nil, fmt.Errorf("chainID is required") + return nil, errors.New("chainID is required") } signer := types.LatestSignerForChainID(chainID) @@ -195,7 +196,7 @@ func recoverEthSignature(expectedPublicKeyBytes, txHash, r, s []byte) ([]byte, e } if hex.EncodeToString(recoveredPublicKeyBytes) != hex.EncodeToString(expectedPublicKeyBytes) { - return nil, fmt.Errorf("can not reconstruct public key from sig") + return nil, errors.New("can not reconstruct public key from sig") } } @@ -238,15 +239,15 @@ func KMSConfigFromEnvVars() (KMS, error) { var exists bool config.KmsDeployerKeyId, exists = os.LookupEnv("KMS_DEPLOYER_KEY_ID") if !exists { - return config, fmt.Errorf("KMS_DEPLOYER_KEY_ID is required") + return config, errors.New("KMS_DEPLOYER_KEY_ID is required") } config.KmsDeployerKeyRegion, exists = os.LookupEnv("KMS_DEPLOYER_KEY_REGION") if !exists { - return config, fmt.Errorf("KMS_DEPLOYER_KEY_REGION is required") + return config, errors.New("KMS_DEPLOYER_KEY_REGION is required") } config.AwsProfileName, exists = os.LookupEnv("AWS_PROFILE") if !exists { - return config, fmt.Errorf("AWS_PROFILE is required") + return config, errors.New("AWS_PROFILE is required") } return config, nil } diff --git a/deployment/helpers.go b/deployment/helpers.go index 34a2584a544..d8e15d0200d 100644 --- a/deployment/helpers.go +++ b/deployment/helpers.go @@ -132,7 +132,6 @@ func DecodeErr(encodedABI string, err error) error { return fmt.Errorf("failed to decode error '%s' with abi: %w", encErr, parseErr) } return fmt.Errorf("contract error: %s", errStr) - } return fmt.Errorf("cannot decode error with abi: %w", err) } @@ -182,7 +181,7 @@ func DeployContract[C any]( func IsValidChainSelector(cs uint64) error { if cs == 0 { - return fmt.Errorf("chain selector must be set") + return errors.New("chain selector must be set") } _, err := chain_selectors.GetSelectorFamily(cs) if err != nil { diff --git a/deployment/keystone/changeset/append_node_capabilities.go b/deployment/keystone/changeset/append_node_capabilities.go index d558cf39c95..9ae1923d270 100644 --- a/deployment/keystone/changeset/append_node_capabilities.go +++ b/deployment/keystone/changeset/append_node_capabilities.go @@ -1,11 +1,13 @@ package changeset import ( + "errors" "fmt" "github.com/ethereum/go-ethereum/common" "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" + "github.com/smartcontractkit/chainlink/deployment" "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/deployment/keystone/changeset/internal" @@ -30,7 +32,7 @@ 
func AppendNodeCapabilities(env deployment.Environment, req *AppendNodeCapabilit out := deployment.ChangesetOutput{} if req.UseMCMS() { if r.Ops == nil { - return out, fmt.Errorf("expected MCMS operation to be non-nil") + return out, errors.New("expected MCMS operation to be non-nil") } timelocksPerChain := map[uint64]common.Address{ c.Chain.Selector: c.ContractSet.Timelock.Address(), diff --git a/deployment/keystone/changeset/append_node_capabilities_test.go b/deployment/keystone/changeset/append_node_capabilities_test.go index fb2c99ed15e..3cf6081e966 100644 --- a/deployment/keystone/changeset/append_node_capabilities_test.go +++ b/deployment/keystone/changeset/append_node_capabilities_test.go @@ -38,7 +38,7 @@ func TestAppendNodeCapabilities(t *testing.T) { }) newCapabilities := make(map[p2pkey.PeerID][]kcr.CapabilitiesRegistryCapability) - for id, _ := range te.WFNodes { + for id := range te.WFNodes { k, err := p2pkey.MakePeerID(id) require.NoError(t, err) newCapabilities[k] = caps @@ -52,7 +52,7 @@ func TestAppendNodeCapabilities(t *testing.T) { csOut, err := changeset.AppendNodeCapabilities(te.Env, &cfg) require.NoError(t, err) - require.Len(t, csOut.Proposals, 0) + require.Empty(t, csOut.Proposals) require.Nil(t, csOut.AddressBook) validateCapabilityAppends(t, te, newCapabilities) @@ -68,7 +68,7 @@ func TestAppendNodeCapabilities(t *testing.T) { }) newCapabilities := make(map[p2pkey.PeerID][]kcr.CapabilitiesRegistryCapability) - for id, _ := range te.WFNodes { + for id := range te.WFNodes { k, err := p2pkey.MakePeerID(id) require.NoError(t, err) newCapabilities[k] = caps @@ -105,7 +105,6 @@ func TestAppendNodeCapabilities(t *testing.T) { require.NoError(t, err) validateCapabilityAppends(t, te, newCapabilities) }) - } // validateUpdate checks reads nodes from the registry and checks they have the expected updates diff --git a/deployment/keystone/changeset/deploy_consumer.go b/deployment/keystone/changeset/deploy_consumer.go index d94d7ac0adc..5442a21576a 100644 --- a/deployment/keystone/changeset/deploy_consumer.go +++ b/deployment/keystone/changeset/deploy_consumer.go @@ -1,6 +1,7 @@ package changeset import ( + "errors" "fmt" "github.com/smartcontractkit/chainlink/deployment" @@ -19,7 +20,7 @@ func DeployFeedsConsumer(env deployment.Environment, req *DeployFeedsConsumerReq lggr := env.Logger chain, ok := env.Chains[chainSelector] if !ok { - return deployment.ChangesetOutput{}, fmt.Errorf("chain not found in environment") + return deployment.ChangesetOutput{}, errors.New("chain not found in environment") } ab := deployment.NewMemoryAddressBook() deployResp, err := kslib.DeployFeedsConsumer(chain, ab) diff --git a/deployment/keystone/changeset/deploy_consumer_test.go b/deployment/keystone/changeset/deploy_consumer_test.go index 9a1e8f57da7..e73986b6ecf 100644 --- a/deployment/keystone/changeset/deploy_consumer_test.go +++ b/deployment/keystone/changeset/deploy_consumer_test.go @@ -36,5 +36,5 @@ func TestDeployFeedsConsumer(t *testing.T) { // no feeds consumer registry on chain 1 require.NotEqual(t, registrySel, env.AllChainSelectors()[1]) oaddrs, _ := resp.AddressBook.AddressesForChain(env.AllChainSelectors()[1]) - require.Len(t, oaddrs, 0) + require.Empty(t, oaddrs) } diff --git a/deployment/keystone/changeset/deploy_forwarder.go b/deployment/keystone/changeset/deploy_forwarder.go index 66923140e6a..8a9cdf4d681 100644 --- a/deployment/keystone/changeset/deploy_forwarder.go +++ b/deployment/keystone/changeset/deploy_forwarder.go @@ -1,6 +1,7 @@ package changeset import ( + "errors" 
"fmt" "maps" "slices" @@ -8,6 +9,7 @@ import ( "github.com/ethereum/go-ethereum/common" "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" + "github.com/smartcontractkit/chainlink/deployment" "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/deployment/keystone/changeset/internal" @@ -59,7 +61,7 @@ type ConfigureForwardContractsRequest struct { func (r ConfigureForwardContractsRequest) Validate() error { if len(r.WFNodeIDs) == 0 { - return fmt.Errorf("WFNodeIDs must not be empty") + return errors.New("WFNodeIDs must not be empty") } return nil } @@ -96,7 +98,7 @@ func ConfigureForwardContracts(env deployment.Environment, req ConfigureForwardC var out deployment.ChangesetOutput if req.UseMCMS() { if len(r.OpsPerChain) == 0 { - return out, fmt.Errorf("expected MCMS operation to be non-nil") + return out, errors.New("expected MCMS operation to be non-nil") } for chainSelector, op := range r.OpsPerChain { contracts := cresp.ContractSets[chainSelector] diff --git a/deployment/keystone/changeset/deploy_forwarder_test.go b/deployment/keystone/changeset/deploy_forwarder_test.go index ec80a9432b0..40ef0c02aeb 100644 --- a/deployment/keystone/changeset/deploy_forwarder_test.go +++ b/deployment/keystone/changeset/deploy_forwarder_test.go @@ -4,9 +4,8 @@ import ( "fmt" "testing" - "go.uber.org/zap/zapcore" - "github.com/stretchr/testify/require" + "go.uber.org/zap/zapcore" "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink/deployment" @@ -65,7 +64,7 @@ func TestConfigureForwarders(t *testing.T) { }) var wfNodes []string - for id, _ := range te.WFNodes { + for id := range te.WFNodes { wfNodes = append(wfNodes, id) } @@ -77,7 +76,7 @@ func TestConfigureForwarders(t *testing.T) { csOut, err := changeset.ConfigureForwardContracts(te.Env, cfg) require.NoError(t, err) require.Nil(t, csOut.AddressBook) - require.Len(t, csOut.Proposals, 0) + require.Empty(t, csOut.Proposals) // check that forwarder // TODO set up a listener to check that the forwarder is configured contractSet := te.ContractSets() @@ -103,7 +102,7 @@ func TestConfigureForwarders(t *testing.T) { }) var wfNodes []string - for id, _ := range te.WFNodes { + for id := range te.WFNodes { wfNodes = append(wfNodes, id) } @@ -134,9 +133,7 @@ func TestConfigureForwarders(t *testing.T) { }, }) require.NoError(t, err) - }) } }) - } diff --git a/deployment/keystone/changeset/deploy_ocr3.go b/deployment/keystone/changeset/deploy_ocr3.go index ba5ea2921d9..75f9b75ecd1 100644 --- a/deployment/keystone/changeset/deploy_ocr3.go +++ b/deployment/keystone/changeset/deploy_ocr3.go @@ -2,6 +2,7 @@ package changeset import ( "encoding/json" + "errors" "fmt" "io" @@ -23,7 +24,7 @@ func DeployOCR3(env deployment.Environment, registryChainSel uint64) (deployment // ocr3 only deployed on registry chain c, ok := env.Chains[registryChainSel] if !ok { - return deployment.ChangesetOutput{}, fmt.Errorf("chain not found in environment") + return deployment.ChangesetOutput{}, errors.New("chain not found in environment") } ocr3Resp, err := kslib.DeployOCR3(c, ab) if err != nil { @@ -74,14 +75,14 @@ func ConfigureOCR3Contract(env deployment.Environment, cfg ConfigureOCR3Config) return deployment.ChangesetOutput{}, fmt.Errorf("failed to write response output: %w", err) } if n != len(b) { - return deployment.ChangesetOutput{}, fmt.Errorf("failed to write all bytes") + return 
deployment.ChangesetOutput{}, errors.New("failed to write all bytes") } } // does not create any new addresses var out deployment.ChangesetOutput if cfg.UseMCMS() { if resp.Ops == nil { - return out, fmt.Errorf("expected MCMS operation to be non-nil") + return out, errors.New("expected MCMS operation to be non-nil") } r, err := kslib.GetContractSets(env.Logger, &kslib.GetContractSetsRequest{ Chains: env.Chains, @@ -109,7 +110,6 @@ func ConfigureOCR3Contract(env deployment.Environment, cfg ConfigureOCR3Config) return out, fmt.Errorf("failed to build proposal: %w", err) } out.Proposals = []timelock.MCMSWithTimelockProposal{*proposal} - } return out, nil } diff --git a/deployment/keystone/changeset/deploy_ocr3_test.go b/deployment/keystone/changeset/deploy_ocr3_test.go index ea984989703..5ede6c5e6c7 100644 --- a/deployment/keystone/changeset/deploy_ocr3_test.go +++ b/deployment/keystone/changeset/deploy_ocr3_test.go @@ -5,11 +5,10 @@ import ( "encoding/json" "testing" - "go.uber.org/zap/zapcore" - "github.com/ethereum/go-ethereum/common" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "go.uber.org/zap/zapcore" "github.com/smartcontractkit/chainlink-common/pkg/logger" @@ -43,7 +42,7 @@ func TestDeployOCR3(t *testing.T) { // nothing on chain 1 require.NotEqual(t, registrySel, env.AllChainSelectors()[1]) oaddrs, _ := resp.AddressBook.AddressesForChain(env.AllChainSelectors()[1]) - assert.Len(t, oaddrs, 0) + assert.Empty(t, oaddrs) } func TestConfigureOCR3(t *testing.T) { @@ -55,7 +54,6 @@ func TestConfigureOCR3(t *testing.T) { } t.Run("no mcms", func(t *testing.T) { - te := test.SetupTestEnv(t, test.TestConfig{ WFDonConfig: test.DonConfig{N: 4}, AssetDonConfig: test.DonConfig{N: 4}, @@ -295,5 +293,4 @@ func TestConfigureOCR3(t *testing.T) { }) require.NoError(t, err) }) - } diff --git a/deployment/keystone/changeset/deploy_registry.go b/deployment/keystone/changeset/deploy_registry.go index 2b8342c06dd..f78b6762f9e 100644 --- a/deployment/keystone/changeset/deploy_registry.go +++ b/deployment/keystone/changeset/deploy_registry.go @@ -1,6 +1,7 @@ package changeset import ( + "errors" "fmt" "github.com/smartcontractkit/chainlink/deployment" @@ -13,7 +14,7 @@ func DeployCapabilityRegistry(env deployment.Environment, registrySelector uint6 lggr := env.Logger chain, ok := env.Chains[registrySelector] if !ok { - return deployment.ChangesetOutput{}, fmt.Errorf("chain not found in environment") + return deployment.ChangesetOutput{}, errors.New("chain not found in environment") } ab := deployment.NewMemoryAddressBook() capabilitiesRegistryResp, err := kslib.DeployCapabilitiesRegistry(chain, ab) diff --git a/deployment/keystone/changeset/deploy_registry_test.go b/deployment/keystone/changeset/deploy_registry_test.go index 9abf357f2a8..713ef897197 100644 --- a/deployment/keystone/changeset/deploy_registry_test.go +++ b/deployment/keystone/changeset/deploy_registry_test.go @@ -34,5 +34,5 @@ func TestDeployCapabilityRegistry(t *testing.T) { // no capabilities registry on chain 1 require.NotEqual(t, registrySel, env.AllChainSelectors()[1]) oaddrs, _ := resp.AddressBook.AddressesForChain(env.AllChainSelectors()[1]) - require.Len(t, oaddrs, 0) + require.Empty(t, oaddrs) } diff --git a/deployment/keystone/changeset/internal/append_node_capabilities.go b/deployment/keystone/changeset/internal/append_node_capabilities.go index 32fe8572da3..c6379fd24fd 100644 --- a/deployment/keystone/changeset/internal/append_node_capabilities.go +++ 
b/deployment/keystone/changeset/internal/append_node_capabilities.go @@ -1,6 +1,7 @@ package internal import ( + "errors" "fmt" "github.com/smartcontractkit/chainlink-common/pkg/logger" @@ -20,10 +21,10 @@ type AppendNodeCapabilitiesRequest struct { func (req *AppendNodeCapabilitiesRequest) Validate() error { if len(req.P2pToCapabilities) == 0 { - return fmt.Errorf("p2pToCapabilities is empty") + return errors.New("p2pToCapabilities is empty") } if req.ContractSet.CapabilitiesRegistry == nil { - return fmt.Errorf("registry is nil") + return errors.New("registry is nil") } return nil } diff --git a/deployment/keystone/changeset/internal/capability_management.go b/deployment/keystone/changeset/internal/capability_management.go index 268b4fd0d01..d85c3f0dfff 100644 --- a/deployment/keystone/changeset/internal/capability_management.go +++ b/deployment/keystone/changeset/internal/capability_management.go @@ -6,6 +6,7 @@ import ( "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/mcms" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" + "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink/deployment" kcr "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/keystone/generated/capabilities_registry_1_1_0" diff --git a/deployment/keystone/changeset/internal/contract_set.go b/deployment/keystone/changeset/internal/contract_set.go index e60f37d6f76..540ab80097b 100644 --- a/deployment/keystone/changeset/internal/contract_set.go +++ b/deployment/keystone/changeset/internal/contract_set.go @@ -108,7 +108,6 @@ func DeployFeedsConsumer(chain deployment.Chain, ab deployment.AddressBook) (*De } err = ab.Save(chain.Selector, consumerResp.Address.String(), consumerResp.Tv) if err != nil { - return nil, fmt.Errorf("failed to save FeedsConsumer: %w", err) } return consumerResp, nil diff --git a/deployment/keystone/changeset/internal/deploy.go b/deployment/keystone/changeset/internal/deploy.go index acaabd22131..b52d269518d 100644 --- a/deployment/keystone/changeset/internal/deploy.go +++ b/deployment/keystone/changeset/internal/deploy.go @@ -19,6 +19,7 @@ import ( "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/mcms" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" + "github.com/smartcontractkit/chainlink/deployment" "google.golang.org/protobuf/proto" @@ -187,7 +188,7 @@ func GetRegistryContract(e *deployment.Environment, registryChainSel uint64) (*c } registry = registryChainContracts.CapabilitiesRegistry if registry == nil { - return nil, deployment.Chain{}, fmt.Errorf("no registry contract found") + return nil, deployment.Chain{}, errors.New("no registry contract found") } e.Logger.Debugf("registry contract address: %s, chain %d", registry.Address().String(), registryChainSel) return registry, registryChain, nil @@ -409,7 +410,6 @@ func ConfigureOCR3ContractFromJD(env *deployment.Environment, cfg ConfigureOCR3C OCR2OracleConfig: r.ocrConfig, Ops: r.ops, }, nil - } type RegisterCapabilitiesRequest struct { @@ -445,7 +445,7 @@ func FromCapabilitiesRegistryCapability(cap *capabilities_registry.CapabilitiesR // RegisterCapabilities add computes the capability id, adds it to the registry and associates the registered capabilities with appropriate don(s) func RegisterCapabilities(lggr logger.Logger, req RegisterCapabilitiesRequest) (*RegisterCapabilitiesResponse, error) { if len(req.DonToCapabilities) == 0 { - return nil, fmt.Errorf("no capabilities to register") + return nil, 
errors.New("no capabilities to register") } cresp, err := GetContractSets(req.Env.Logger, &GetContractSetsRequest{ Chains: req.Env.Chains, @@ -891,7 +891,7 @@ func RegisterDons(lggr logger.Logger, req RegisterDonsRequest) (*RegisterDonsRes return nil, fmt.Errorf("failed to call GetDONs: %w", err) } if !foundAll { - return nil, fmt.Errorf("did not find all desired DONS") + return nil, errors.New("did not find all desired DONS") } resp := RegisterDonsResponse{ @@ -903,7 +903,7 @@ func RegisterDons(lggr logger.Logger, req RegisterDonsRequest) (*RegisterDonsRes lggr.Debugw("irrelevant DON found in the registry, ignoring", "p2p sorted hash", sortedHash(donInfo.NodeP2PIds)) continue } - lggr.Debugw("adding don info to the reponse (keyed by DON name)", "don", donName) + lggr.Debugw("adding don info to the response (keyed by DON name)", "don", donName) resp.DonInfos[donName] = donInfos[i] } return &resp, nil diff --git a/deployment/keystone/changeset/internal/forwarder_deployer.go b/deployment/keystone/changeset/internal/forwarder_deployer.go index 2ce3ae88146..6e374e200d7 100644 --- a/deployment/keystone/changeset/internal/forwarder_deployer.go +++ b/deployment/keystone/changeset/internal/forwarder_deployer.go @@ -5,6 +5,7 @@ import ( "github.com/ethereum/go-ethereum/accounts/abi/bind" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" + "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink/deployment" diff --git a/deployment/keystone/changeset/internal/ocr3_deployer.go b/deployment/keystone/changeset/internal/ocr3_deployer.go index beafe9bb9e2..35e75b5ec43 100644 --- a/deployment/keystone/changeset/internal/ocr3_deployer.go +++ b/deployment/keystone/changeset/internal/ocr3_deployer.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/ethereum/go-ethereum/accounts/abi/bind" + "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink/deployment" diff --git a/deployment/keystone/changeset/internal/ocr3config.go b/deployment/keystone/changeset/internal/ocr3config.go index 74f8a9dabd5..d1d2e337efb 100644 --- a/deployment/keystone/changeset/internal/ocr3config.go +++ b/deployment/keystone/changeset/internal/ocr3config.go @@ -222,7 +222,7 @@ func GenerateOCR3Config(cfg OracleConfig, nca []NodeKeys, secrets deployment.OCR for index := range nca { identities = append(identities, confighelper.OracleIdentityExtra{ OracleIdentity: confighelper.OracleIdentity{ - OnchainPublicKey: onchainPubKeys[index][:], + OnchainPublicKey: onchainPubKeys[index], OffchainPublicKey: offchainPubKeysBytes[index], PeerID: nca[index].P2PPeerID, TransmitAccount: types.Account(nca[index].EthAddress), @@ -303,7 +303,7 @@ type configureOCR3Response struct { func configureOCR3contract(req configureOCR3Request) (*configureOCR3Response, error) { if req.contract == nil { - return nil, fmt.Errorf("OCR3 contract is nil") + return nil, errors.New("OCR3 contract is nil") } ocrConfig, err := req.generateOCR3Config() if err != nil { diff --git a/deployment/keystone/changeset/internal/ocr3config_test.go b/deployment/keystone/changeset/internal/ocr3config_test.go index b412a727eb9..55769fdaece 100644 --- a/deployment/keystone/changeset/internal/ocr3config_test.go +++ b/deployment/keystone/changeset/internal/ocr3config_test.go @@ -115,7 +115,7 @@ func loadTestData(t *testing.T, path string) []deployment.Node { // in general we can map from the view to the node, but we know the test data var nodes []deployment.Node - //for _, nv := range nodeViews { + 
// for _, nv := range nodeViews { for _, name := range names { nv := nodeViews[name] node := deployment.Node{ diff --git a/deployment/keystone/changeset/internal/types.go b/deployment/keystone/changeset/internal/types.go index 173e3ba1ad0..cffd69f85e6 100644 --- a/deployment/keystone/changeset/internal/types.go +++ b/deployment/keystone/changeset/internal/types.go @@ -1,6 +1,7 @@ package internal import ( + "encoding/hex" "errors" "fmt" "slices" @@ -14,7 +15,7 @@ import ( "github.com/smartcontractkit/chainlink/deployment" - "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/keystone/generated/capabilities_registry_1_1_0" + capabilities_registry "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/keystone/generated/capabilities_registry_1_1_0" kcr "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/keystone/generated/capabilities_registry_1_1_0" "github.com/smartcontractkit/chainlink/v2/core/services/keystore/keys/p2pkey" ) @@ -81,9 +82,9 @@ func toNodeKeys(o *deployment.Node, registryChainSel uint64) NodeKeys { EthAddress: string(evmCC.TransmitAccount), P2PPeerID: strings.TrimPrefix(o.PeerID.String(), "p2p_"), OCR2BundleID: evmCC.KeyBundleID, - OCR2OffchainPublicKey: fmt.Sprintf("%x", evmCC.OffchainPublicKey[:]), + OCR2OffchainPublicKey: hex.EncodeToString(evmCC.OffchainPublicKey[:]), OCR2OnchainPublicKey: fmt.Sprintf("%x", evmCC.OnchainPublicKey[:]), - OCR2ConfigPublicKey: fmt.Sprintf("%x", evmCC.ConfigEncryptionPublicKey[:]), + OCR2ConfigPublicKey: hex.EncodeToString(evmCC.ConfigEncryptionPublicKey[:]), CSAPublicKey: o.CSAKey, // default value of encryption public key is the CSA public key // TODO: DEVSVCS-760 @@ -266,7 +267,7 @@ func NewRegisteredDon(env deployment.Environment, cfg RegisteredDonConfig) (*Reg } } if don == nil { - return nil, fmt.Errorf("don not found in registry") + return nil, errors.New("don not found in registry") } return &RegisteredDon{ Name: cfg.Name, @@ -286,11 +287,10 @@ func (d RegisteredDon) Signers(chainFamily string) []common.Address { } var found bool var registryChainDetails chainsel.ChainDetails - for details, _ := range n.SelToOCRConfig { + for details := range n.SelToOCRConfig { if family, err := chainsel.GetSelectorFamily(details.ChainSelector); err == nil && family == chainFamily { found = true registryChainDetails = details - } } if !found { @@ -319,7 +319,6 @@ func joinInfoAndNodes(donInfos map[string]kcr.CapabilitiesRegistryDONInfo, dons } var out []RegisteredDon for donName, info := range donInfos { - ocr2nodes, ok := nodes[donName] if !ok { return nil, fmt.Errorf("nodes not found for don %s", donName) diff --git a/deployment/keystone/changeset/internal/types_test.go b/deployment/keystone/changeset/internal/types_test.go index cfc953d6126..e8d02f51df0 100644 --- a/deployment/keystone/changeset/internal/types_test.go +++ b/deployment/keystone/changeset/internal/types_test.go @@ -10,10 +10,11 @@ import ( "github.com/ethereum/go-ethereum/common" chainsel "github.com/smartcontractkit/chain-selectors" - "github.com/smartcontractkit/chainlink/deployment" - "github.com/smartcontractkit/chainlink/v2/core/services/keystore/keys/p2pkey" "github.com/smartcontractkit/libocr/offchainreporting2plus/types" "github.com/stretchr/testify/require" + + "github.com/smartcontractkit/chainlink/deployment" + "github.com/smartcontractkit/chainlink/v2/core/services/keystore/keys/p2pkey" ) func Test_toNodeKeys(t *testing.T) { @@ -49,7 +50,7 @@ func Test_toNodeKeys(t *testing.T) { SelToOCRConfig: map[chainsel.ChainDetails]deployment.OCRConfig{ 
registryChainDetails: { OffchainPublicKey: types.OffchainPublicKey(common.FromHex("1111111111111111111111111111111111111111111111111111111111111111")), - OnchainPublicKey: signing_1[:], + OnchainPublicKey: signing_1, PeerID: p2pID.PeerID(), TransmitAccount: types.Account(admin_1.String()), ConfigEncryptionPublicKey: encryptionpubkey, diff --git a/deployment/keystone/changeset/internal/update_don.go b/deployment/keystone/changeset/internal/update_don.go index 3cfc386b2ba..aa3e203e5e4 100644 --- a/deployment/keystone/changeset/internal/update_don.go +++ b/deployment/keystone/changeset/internal/update_don.go @@ -5,6 +5,7 @@ import ( "crypto/sha256" "encoding/hex" "encoding/json" + "errors" "fmt" "math/big" "sort" @@ -12,10 +13,11 @@ import ( "github.com/ethereum/go-ethereum/accounts/abi/bind" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/mcms" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" + "google.golang.org/protobuf/proto" + "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink/deployment" "github.com/smartcontractkit/chainlink/v2/core/services/keystore/keys/p2pkey" - "google.golang.org/protobuf/proto" kcr "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/keystone/generated/capabilities_registry_1_1_0" ) @@ -56,10 +58,10 @@ func (r *UpdateDonRequest) AppendNodeCapabilitiesRequest() *AppendNodeCapabiliti func (r *UpdateDonRequest) Validate() error { if r.ContractSet.CapabilitiesRegistry == nil { - return fmt.Errorf("registry is required") + return errors.New("registry is required") } if len(r.P2PIDs) == 0 { - return fmt.Errorf("p2pIDs is required") + return errors.New("p2pIDs is required") } return nil } diff --git a/deployment/keystone/changeset/internal/update_don_test.go b/deployment/keystone/changeset/internal/update_don_test.go index 57b15138538..bf9ab96fecb 100644 --- a/deployment/keystone/changeset/internal/update_don_test.go +++ b/deployment/keystone/changeset/internal/update_don_test.go @@ -11,6 +11,9 @@ import ( "github.com/ethereum/go-ethereum/common" chainsel "github.com/smartcontractkit/chain-selectors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink/deployment" kscs "github.com/smartcontractkit/chainlink/deployment/keystone/changeset" @@ -18,8 +21,6 @@ import ( kstest "github.com/smartcontractkit/chainlink/deployment/keystone/changeset/internal/test" kcr "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/keystone/generated/capabilities_registry_1_1_0" "github.com/smartcontractkit/chainlink/v2/core/services/keystore/keys/p2pkey" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) var ( @@ -153,7 +154,6 @@ func TestUpdateDon(t *testing.T) { assert.Equal(t, want.DonInfo.ConfigCount, got.DonInfo.ConfigCount) assert.Equal(t, sortedP2Pids(want.DonInfo.NodeP2PIds), sortedP2Pids(got.DonInfo.NodeP2PIds)) assert.Equal(t, capIds(want.DonInfo.CapabilityConfigurations), capIds(got.DonInfo.CapabilityConfigurations)) - }) } @@ -234,7 +234,6 @@ func registerTestDon(t *testing.T, lggr logger.Logger, cfg setupUpdateDonTestCon t.Helper() req := newSetupTestRegistryRequest(t, cfg.dons, cfg.nops) return kstest.SetupTestRegistry(t, lggr, req) - } func newSetupTestRegistryRequest(t *testing.T, dons []internal.DonInfo, nops []internal.NOP) *kstest.SetupTestRegistryRequest { diff --git 
a/deployment/keystone/changeset/internal/update_node_capabilities.go b/deployment/keystone/changeset/internal/update_node_capabilities.go index 16c37267060..23e3d66965c 100644 --- a/deployment/keystone/changeset/internal/update_node_capabilities.go +++ b/deployment/keystone/changeset/internal/update_node_capabilities.go @@ -1,6 +1,7 @@ package internal import ( + "errors" "fmt" "github.com/smartcontractkit/chainlink-common/pkg/logger" @@ -19,10 +20,10 @@ type UpdateNodeCapabilitiesImplRequest struct { func (req *UpdateNodeCapabilitiesImplRequest) Validate() error { if len(req.P2pToCapabilities) == 0 { - return fmt.Errorf("p2pToCapabilities is empty") + return errors.New("p2pToCapabilities is empty") } if req.ContractSet == nil { - return fmt.Errorf("registry is nil") + return errors.New("registry is nil") } return nil diff --git a/deployment/keystone/changeset/internal/update_nodes.go b/deployment/keystone/changeset/internal/update_nodes.go index b27c17ad19f..976125e582d 100644 --- a/deployment/keystone/changeset/internal/update_nodes.go +++ b/deployment/keystone/changeset/internal/update_nodes.go @@ -12,6 +12,7 @@ import ( "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/mcms" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" + "github.com/smartcontractkit/chainlink-common/pkg/logger" kcr "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/keystone/generated/capabilities_registry_1_1_0" "github.com/smartcontractkit/chainlink/v2/core/services/keystore/keys/p2pkey" @@ -194,7 +195,6 @@ func AppendCapabilities(lggr logger.Logger, registry *kcr.CapabilitiesRegistry, func makeNodeParams(registry *kcr.CapabilitiesRegistry, p2pToUpdates map[p2pkey.PeerID]NodeUpdate) ([]kcr.CapabilitiesRegistryNodeParams, error) { - var out []kcr.CapabilitiesRegistryNodeParams var p2pIds []p2pkey.PeerID for p2pID := range p2pToUpdates { @@ -257,7 +257,6 @@ func makeNodeParams(registry *kcr.CapabilitiesRegistry, }) return out, nil - } // fetchCapabilityIDs fetches the capability ids for the given capabilities diff --git a/deployment/keystone/changeset/internal/update_nodes_test.go b/deployment/keystone/changeset/internal/update_nodes_test.go index 0f22120998a..1b532129e48 100644 --- a/deployment/keystone/changeset/internal/update_nodes_test.go +++ b/deployment/keystone/changeset/internal/update_nodes_test.go @@ -597,7 +597,6 @@ func TestUpdateNodes(t *testing.T) { } func TestAppendCapabilities(t *testing.T) { - var ( capMap = map[p2pkey.PeerID][]kcr.CapabilitiesRegistryCapability{ testPeerID(t, "peerID_1"): []kcr.CapabilitiesRegistryCapability{ @@ -663,7 +662,6 @@ func TestAppendCapabilities(t *testing.T) { gotCaps2 := appendedResp2[testPeerID(t, "peerID_1")] require.Len(t, gotCaps2, 3) require.EqualValues(t, gotCaps, gotCaps2) - } func testPeerID(t *testing.T, s string) p2pkey.PeerID { diff --git a/deployment/keystone/changeset/update_don.go b/deployment/keystone/changeset/update_don.go index 5b381a4e498..47cb7c82507 100644 --- a/deployment/keystone/changeset/update_don.go +++ b/deployment/keystone/changeset/update_don.go @@ -1,6 +1,7 @@ package changeset import ( + "errors" "fmt" "github.com/smartcontractkit/chainlink/deployment" @@ -26,10 +27,10 @@ type UpdateDonRequest struct { func (r *UpdateDonRequest) Validate() error { if len(r.P2PIDs) == 0 { - return fmt.Errorf("p2pIDs is required") + return errors.New("p2pIDs is required") } if len(r.CapabilityConfigs) == 0 { - return fmt.Errorf("capabilityConfigs is required") + return errors.New("capabilityConfigs is 
required") } return nil } @@ -63,10 +64,10 @@ func UpdateDon(env deployment.Environment, req *UpdateDonRequest) (deployment.Ch out := deployment.ChangesetOutput{} if req.UseMCMS() { if updateResult.Ops == nil { - return out, fmt.Errorf("expected MCMS operation to be non-nil") + return out, errors.New("expected MCMS operation to be non-nil") } if len(appendResult.Proposals) == 0 { - return out, fmt.Errorf("expected append node capabilities to return proposals") + return out, errors.New("expected append node capabilities to return proposals") } out.Proposals = appendResult.Proposals @@ -75,10 +76,8 @@ func UpdateDon(env deployment.Environment, req *UpdateDonRequest) (deployment.Ch // this makes the proposal all-or-nothing because all the operations are in the same batch, there is only one tr // transaction and only one proposal out.Proposals[0].Transactions[0].Batch = append(out.Proposals[0].Transactions[0].Batch, updateResult.Ops.Batch...) - } return out, nil - } func appendRequest(r *UpdateDonRequest) *AppendNodeCapabilitiesRequest { diff --git a/deployment/keystone/changeset/update_don_test.go b/deployment/keystone/changeset/update_don_test.go index 2487087e235..74e2609b0a1 100644 --- a/deployment/keystone/changeset/update_don_test.go +++ b/deployment/keystone/changeset/update_don_test.go @@ -41,7 +41,7 @@ func TestUpdateDon(t *testing.T) { // we have to keep track of the existing capabilities to add to the new ones var p2pIDs []p2pkey.PeerID newCapabilities := make(map[p2pkey.PeerID][]kcr.CapabilitiesRegistryCapability) - for id, _ := range te.WFNodes { + for id := range te.WFNodes { k, err := p2pkey.MakePeerID(id) require.NoError(t, err) p2pIDs = append(p2pIDs, k) @@ -64,7 +64,7 @@ func TestUpdateDon(t *testing.T) { csOut, err := changeset.UpdateDon(te.Env, &cfg) require.NoError(t, err) - require.Len(t, csOut.Proposals, 0) + require.Empty(t, csOut.Proposals) require.Nil(t, csOut.AddressBook) assertDonContainsCapabilities(t, te.ContractSets()[te.RegistrySelector].CapabilitiesRegistry, caps, p2pIDs) @@ -82,7 +82,7 @@ func TestUpdateDon(t *testing.T) { // contract set is already deployed with capabilities // we have to keep track of the existing capabilities to add to the new ones var p2pIDs []p2pkey.PeerID - for id, _ := range te.WFNodes { + for id := range te.WFNodes { k, err := p2pkey.MakePeerID(id) require.NoError(t, err) p2pIDs = append(p2pIDs, k) diff --git a/deployment/keystone/changeset/update_node_capabilities.go b/deployment/keystone/changeset/update_node_capabilities.go index 8c4d01159ed..c96393328db 100644 --- a/deployment/keystone/changeset/update_node_capabilities.go +++ b/deployment/keystone/changeset/update_node_capabilities.go @@ -1,6 +1,7 @@ package changeset import ( + "errors" "fmt" "strconv" @@ -9,6 +10,7 @@ import ( "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" + "github.com/smartcontractkit/chainlink/deployment" "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" "github.com/smartcontractkit/chainlink/deployment/keystone/changeset/internal" @@ -61,7 +63,7 @@ type MutateNodeCapabilitiesRequest struct { func (req *MutateNodeCapabilitiesRequest) Validate() error { if len(req.P2pToCapabilities) == 0 { - return fmt.Errorf("p2pToCapabilities is empty") + return errors.New("p2pToCapabilities is empty") } _, exists := chainsel.ChainBySelector(req.RegistryChainSel) if !exists { @@ -118,7 +120,7 @@ func UpdateNodeCapabilities(env deployment.Environment, req 
*UpdateNodeCapabilit out := deployment.ChangesetOutput{} if req.UseMCMS() { if r.Ops == nil { - return out, fmt.Errorf("expected MCMS operation to be non-nil") + return out, errors.New("expected MCMS operation to be non-nil") } timelocksPerChain := map[uint64]common.Address{ c.Chain.Selector: c.ContractSet.Timelock.Address(), diff --git a/deployment/keystone/changeset/update_node_capabilities_test.go b/deployment/keystone/changeset/update_node_capabilities_test.go index cf6b9601039..8962dfc389d 100644 --- a/deployment/keystone/changeset/update_node_capabilities_test.go +++ b/deployment/keystone/changeset/update_node_capabilities_test.go @@ -41,7 +41,7 @@ func TestUpdateNodeCapabilities(t *testing.T) { // we have to keep track of the existing capabilities to add to the new ones var p2pIDs []p2pkey.PeerID newCapabilities := make(map[p2pkey.PeerID][]kcr.CapabilitiesRegistryCapability) - for id, _ := range te.WFNodes { + for id := range te.WFNodes { k, err := p2pkey.MakePeerID(id) require.NoError(t, err) p2pIDs = append(p2pIDs, k) @@ -49,7 +49,6 @@ func TestUpdateNodeCapabilities(t *testing.T) { } t.Run("fails if update drops existing capabilities", func(t *testing.T) { - cfg := changeset.UpdateNodeCapabilitiesRequest{ RegistryChainSel: te.RegistrySelector, P2pToCapabilities: newCapabilities, @@ -73,7 +72,7 @@ func TestUpdateNodeCapabilities(t *testing.T) { csOut, err := changeset.UpdateNodeCapabilities(te.Env, &cfg) require.NoError(t, err) - require.Len(t, csOut.Proposals, 0) + require.Empty(t, csOut.Proposals) require.Nil(t, csOut.AddressBook) validateCapabilityUpdates(t, te, capabiltiesToSet) @@ -92,7 +91,7 @@ func TestUpdateNodeCapabilities(t *testing.T) { // we have to keep track of the existing capabilities to add to the new ones var p2pIDs []p2pkey.PeerID newCapabilities := make(map[p2pkey.PeerID][]kcr.CapabilitiesRegistryCapability) - for id, _ := range te.WFNodes { + for id := range te.WFNodes { k, err := p2pkey.MakePeerID(id) require.NoError(t, err) p2pIDs = append(p2pIDs, k) @@ -135,9 +134,7 @@ func TestUpdateNodeCapabilities(t *testing.T) { }) require.NoError(t, err) validateCapabilityUpdates(t, te, capabiltiesToSet) - }) - } // validateUpdate checks reads nodes from the registry and checks they have the expected updates diff --git a/deployment/keystone/changeset/update_nodes.go b/deployment/keystone/changeset/update_nodes.go index 10a7ad4e441..4a98f8b06e9 100644 --- a/deployment/keystone/changeset/update_nodes.go +++ b/deployment/keystone/changeset/update_nodes.go @@ -1,12 +1,14 @@ package changeset import ( + "errors" "fmt" "time" "github.com/ethereum/go-ethereum/common" "github.com/smartcontractkit/ccip-owner-contracts/pkg/gethwrappers" "github.com/smartcontractkit/ccip-owner-contracts/pkg/proposal/timelock" + "github.com/smartcontractkit/chainlink/deployment" "github.com/smartcontractkit/chainlink/deployment/common/proposalutils" @@ -30,7 +32,7 @@ type UpdateNodesRequest struct { func (r *UpdateNodesRequest) Validate() error { if r.P2pToUpdates == nil { - return fmt.Errorf("P2pToUpdates must be non-nil") + return errors.New("P2pToUpdates must be non-nil") } return nil } @@ -74,7 +76,7 @@ func UpdateNodes(env deployment.Environment, req *UpdateNodesRequest) (deploymen out := deployment.ChangesetOutput{} if req.UseMCMS() { if resp.Ops == nil { - return out, fmt.Errorf("expected MCMS operation to be non-nil") + return out, errors.New("expected MCMS operation to be non-nil") } timelocksPerChain := map[uint64]common.Address{ req.RegistryChainSel: contracts.Timelock.Address(), diff 
--git a/deployment/keystone/changeset/update_nodes_test.go b/deployment/keystone/changeset/update_nodes_test.go index 33662aa669d..5709482ddb3 100644 --- a/deployment/keystone/changeset/update_nodes_test.go +++ b/deployment/keystone/changeset/update_nodes_test.go @@ -28,7 +28,7 @@ func TestUpdateNodes(t *testing.T) { updates := make(map[p2pkey.PeerID]changeset.NodeUpdate) i := uint8(0) - for id, _ := range te.WFNodes { + for id := range te.WFNodes { k, err := p2pkey.MakePeerID(id) require.NoError(t, err) pubKey := [32]byte{31: i + 1} @@ -48,7 +48,7 @@ func TestUpdateNodes(t *testing.T) { csOut, err := changeset.UpdateNodes(te.Env, &cfg) require.NoError(t, err) - require.Len(t, csOut.Proposals, 0) + require.Empty(t, csOut.Proposals) require.Nil(t, csOut.AddressBook) validateUpdate(t, te, updates) @@ -65,7 +65,7 @@ func TestUpdateNodes(t *testing.T) { updates := make(map[p2pkey.PeerID]changeset.NodeUpdate) i := uint8(0) - for id, _ := range te.WFNodes { + for id := range te.WFNodes { k, err := p2pkey.MakePeerID(id) require.NoError(t, err) pubKey := [32]byte{31: i + 1} @@ -111,7 +111,6 @@ func TestUpdateNodes(t *testing.T) { validateUpdate(t, te, updates) }) - } // validateUpdate checks reads nodes from the registry and checks they have the expected updates diff --git a/deployment/keystone/changeset/view.go b/deployment/keystone/changeset/view.go index 9c8678d8778..f6f495fd30b 100644 --- a/deployment/keystone/changeset/view.go +++ b/deployment/keystone/changeset/view.go @@ -37,7 +37,6 @@ func ViewKeystone(e deployment.Environment) (json.Marshaler, error) { return nil, fmt.Errorf("failed to view contract set: %w", err) } chainViews[chainName] = v - } nopsView, err := commonview.GenerateNopsView(e.NodeIDs, e.Offchain) if err != nil { diff --git a/deployment/keystone/changeset/workflowregistry/deploy.go b/deployment/keystone/changeset/workflowregistry/deploy.go index e55484aa711..bb88918594c 100644 --- a/deployment/keystone/changeset/workflowregistry/deploy.go +++ b/deployment/keystone/changeset/workflowregistry/deploy.go @@ -1,6 +1,7 @@ package workflowregistry import ( + "errors" "fmt" "github.com/smartcontractkit/chainlink/deployment" @@ -12,7 +13,7 @@ func Deploy(env deployment.Environment, registrySelector uint64) (deployment.Cha lggr := env.Logger chain, ok := env.Chains[registrySelector] if !ok { - return deployment.ChangesetOutput{}, fmt.Errorf("chain not found in environment") + return deployment.ChangesetOutput{}, errors.New("chain not found in environment") } ab := deployment.NewMemoryAddressBook() wrResp, err := deployWorkflowRegistry(chain, ab) diff --git a/deployment/keystone/changeset/workflowregistry/deploy_test.go b/deployment/keystone/changeset/workflowregistry/deploy_test.go index 16eb6fa8512..ec40646b378 100644 --- a/deployment/keystone/changeset/workflowregistry/deploy_test.go +++ b/deployment/keystone/changeset/workflowregistry/deploy_test.go @@ -34,5 +34,5 @@ func Test_Deploy(t *testing.T) { // nothing on chain 1 require.NotEqual(t, registrySel, env.AllChainSelectors()[1]) oaddrs, _ := resp.AddressBook.AddressesForChain(env.AllChainSelectors()[1]) - assert.Len(t, oaddrs, 0) + assert.Empty(t, oaddrs) } diff --git a/deployment/keystone/changeset/workflowregistry/setup_test.go b/deployment/keystone/changeset/workflowregistry/setup_test.go index 78e7d852080..ec4d448b93c 100644 --- a/deployment/keystone/changeset/workflowregistry/setup_test.go +++ b/deployment/keystone/changeset/workflowregistry/setup_test.go @@ -3,12 +3,13 @@ package workflowregistry import ( "testing" + 
"github.com/stretchr/testify/require" + "github.com/smartcontractkit/chainlink-common/pkg/logger" "github.com/smartcontractkit/chainlink/deployment" "github.com/smartcontractkit/chainlink/deployment/environment/memory" "github.com/smartcontractkit/chainlink/deployment/keystone/changeset" workflow_registry "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/workflow/generated/workflow_registry_wrapper" - "github.com/stretchr/testify/require" ) type SetupTestWorkflowRegistryResponse struct { diff --git a/deployment/keystone/changeset/workflowregistry/update_allowed_dons_test.go b/deployment/keystone/changeset/workflowregistry/update_allowed_dons_test.go index f24db609553..aa869ce1517 100644 --- a/deployment/keystone/changeset/workflowregistry/update_allowed_dons_test.go +++ b/deployment/keystone/changeset/workflowregistry/update_allowed_dons_test.go @@ -29,7 +29,7 @@ func TestUpdateAllowedDons(t *testing.T) { dons, err := registry.GetAllAllowedDONs(&bind.CallOpts{}) require.NoError(t, err) - assert.Len(t, dons, 0) + assert.Empty(t, dons) env := deployment.Environment{ Logger: lggr, @@ -53,7 +53,7 @@ func TestUpdateAllowedDons(t *testing.T) { require.NoError(t, err) assert.Len(t, dons, 1) - assert.Equal(t, dons[0], uint32(1)) + assert.Equal(t, uint32(1), dons[0]) _, err = workflowregistry.UpdateAllowedDons( env, @@ -68,7 +68,7 @@ func TestUpdateAllowedDons(t *testing.T) { dons, err = registry.GetAllAllowedDONs(&bind.CallOpts{}) require.NoError(t, err) - assert.Len(t, dons, 0) + assert.Empty(t, dons) } func Test_UpdateAllowedDons_WithMCMS(t *testing.T) { diff --git a/deployment/keystone/changeset/workflowregistry/update_authorized_addresses_test.go b/deployment/keystone/changeset/workflowregistry/update_authorized_addresses_test.go index a8d969fce0c..ed650ed52c6 100644 --- a/deployment/keystone/changeset/workflowregistry/update_authorized_addresses_test.go +++ b/deployment/keystone/changeset/workflowregistry/update_authorized_addresses_test.go @@ -30,7 +30,7 @@ func TestUpdateAuthorizedAddresses(t *testing.T) { dons, err := registry.GetAllAuthorizedAddresses(&bind.CallOpts{}) require.NoError(t, err) - assert.Len(t, dons, 0) + assert.Empty(t, dons) env := deployment.Environment{ Logger: lggr, @@ -70,7 +70,7 @@ func TestUpdateAuthorizedAddresses(t *testing.T) { dons, err = registry.GetAllAuthorizedAddresses(&bind.CallOpts{}) require.NoError(t, err) - assert.Len(t, dons, 0) + assert.Empty(t, dons) } func Test_UpdateAuthorizedAddresses_WithMCMS(t *testing.T) { diff --git a/deployment/keystone/changeset/workflowregistry/workflow_registry_deployer.go b/deployment/keystone/changeset/workflowregistry/workflow_registry_deployer.go index ac5bbd16cc8..6ebe6693482 100644 --- a/deployment/keystone/changeset/workflowregistry/workflow_registry_deployer.go +++ b/deployment/keystone/changeset/workflowregistry/workflow_registry_deployer.go @@ -30,7 +30,6 @@ func (c *workflowRegistryDeployer) Contract() *workflow_registry.WorkflowRegistr } func (c *workflowRegistryDeployer) Deploy(req changeset.DeployRequest) (*changeset.DeployResponse, error) { - addr, tx, wr, err := workflow_registry.DeployWorkflowRegistry( req.Chain.DeployerKey, req.Chain.Client) diff --git a/deployment/multiclient_test.go b/deployment/multiclient_test.go index 2e10c46e33f..152cdbc8d0e 100644 --- a/deployment/multiclient_test.go +++ b/deployment/multiclient_test.go @@ -38,7 +38,7 @@ func TestMultiClient(t *testing.T) { require.NoError(t, err) require.NotNil(t, mc) assert.Equal(t, mc.RetryConfig.Attempts, 
uint(RPC_DEFAULT_RETRY_ATTEMPTS)) - assert.Equal(t, mc.RetryConfig.Delay, RPC_DEFAULT_RETRY_DELAY) + assert.Equal(t, RPC_DEFAULT_RETRY_DELAY, mc.RetryConfig.Delay) _, err = NewMultiClient(lggr, []RPC{}) require.Error(t, err) @@ -49,5 +49,5 @@ func TestMultiClient(t *testing.T) { {WSURL: s.URL}, }) require.NoError(t, err) - require.Equal(t, len(mc.Backups), 1) + require.Len(t, mc.Backups, 1) } diff --git a/integration-tests/.golangci.yml b/integration-tests/.golangci.yml index 337555e17cb..957d11e04ff 100644 --- a/integration-tests/.golangci.yml +++ b/integration-tests/.golangci.yml @@ -8,7 +8,6 @@ linters: - errname - errorlint - exhaustive - - exportloopref - fatcontext - ginkgolinter - gocritic From 7debe85cc458774c0d94c8d2221a9cb17679fbff Mon Sep 17 00:00:00 2001 From: Dmytro Haidashenko <34754799+dhaidashenko@users.noreply.github.com> Date: Tue, 7 Jan 2025 23:29:55 +0100 Subject: [PATCH 3/8] BCFR-1099 sei custom log index (#15858) * add sei chain and error mapping * fix changeset and config_test * remove sei chain type * add pricemax * custom calculation of log's index for Sei * fix lint issues & tests --------- Co-authored-by: flodesi --- .changeset/clever-knives-tap.md | 5 + ccip/config/evm/Sei_Testnet_Atlantic.toml | 18 +++ core/build/platform_arch_guard.go | 3 + core/chains/evm/client/errors.go | 12 +- core/chains/evm/client/errors_test.go | 6 + core/chains/evm/client/helpers_test.go | 8 +- core/chains/evm/client/rpc_client.go | 51 ++++++- .../evm/client/rpc_client_internal_test.go | 93 ++++++++++++ core/chains/evm/client/rpc_client_test.go | 137 +++++++++++++++++- core/chains/evm/client/sub_forwarder.go | 30 ++-- core/chains/evm/client/sub_forwarder_test.go | 48 ++++-- core/chains/evm/config/chaintype/chaintype.go | 6 +- core/services/chainlink/config_test.go | 4 +- core/services/ocr/contract_tracker.go | 2 +- core/services/ocrcommon/block_translator.go | 2 +- 15 files changed, 383 insertions(+), 42 deletions(-) create mode 100644 .changeset/clever-knives-tap.md create mode 100644 ccip/config/evm/Sei_Testnet_Atlantic.toml create mode 100644 core/build/platform_arch_guard.go create mode 100644 core/chains/evm/client/rpc_client_internal_test.go diff --git a/.changeset/clever-knives-tap.md b/.changeset/clever-knives-tap.md new file mode 100644 index 00000000000..8683e89f77d --- /dev/null +++ b/.changeset/clever-knives-tap.md @@ -0,0 +1,5 @@ +--- +"chainlink": patch +--- + +#added Sei config and error mapping diff --git a/ccip/config/evm/Sei_Testnet_Atlantic.toml b/ccip/config/evm/Sei_Testnet_Atlantic.toml new file mode 100644 index 00000000000..f8c23d95c54 --- /dev/null +++ b/ccip/config/evm/Sei_Testnet_Atlantic.toml @@ -0,0 +1,18 @@ +ChainID = '1328' +ChainType = 'sei' +# finality_depth: instant +FinalityDepth = 10 +# block_time: ~0.4s, adding 1 second buffer +LogPollInterval = '2s' +# finality_depth * block_time / 60 secs = ~0.8 min (finality time) +NoNewFinalizedHeadsThreshold = '5m' +# "RPC node returned multiple missing blocks on query for block numbers [31592085 31592084] even though the WS subscription already sent us these blocks. 
It might help to increase EVM.RPCBlockQueryDelay (currently 1)" +RPCBlockQueryDelay = 5 + +[GasEstimator] +EIP1559DynamicFees = false +Mode = 'BlockHistory' +PriceMax = '3000 gwei' # recommended by ds&a + +[GasEstimator.BlockHistory] +BlockHistorySize = 200 diff --git a/core/build/platform_arch_guard.go b/core/build/platform_arch_guard.go new file mode 100644 index 00000000000..3a22f7df537 --- /dev/null +++ b/core/build/platform_arch_guard.go @@ -0,0 +1,3 @@ +//go:build !amd64 && !arm64 +package build +"non-64-bits architectures are not supported" diff --git a/core/chains/evm/client/errors.go b/core/chains/evm/client/errors.go index bde97185580..eaa33f041ac 100644 --- a/core/chains/evm/client/errors.go +++ b/core/chains/evm/client/errors.go @@ -284,6 +284,16 @@ var gnosis = ClientErrors{ TransactionAlreadyInMempool: regexp.MustCompile(`(: |^)(alreadyknown)`), } +var sei = ClientErrors{ + // https://github.com/sei-protocol/sei-tendermint/blob/e9a22c961e83579d8a68cd045c532980d82fb2a0/types/mempool.go#L12 + TransactionAlreadyInMempool: regexp.MustCompile("tx already exists in cache"), + // https://github.com/sei-protocol/sei-cosmos/blob/a4eb451c957b1ca7ca9118406682f93fe83d1f61/types/errors/errors.go#L50 + // https://github.com/sei-protocol/sei-cosmos/blob/a4eb451c957b1ca7ca9118406682f93fe83d1f61/types/errors/errors.go#L56 + // https://github.com/sei-protocol/sei-cosmos/blob/a4eb451c957b1ca7ca9118406682f93fe83d1f61/client/broadcast.go#L27 + // https://github.com/sei-protocol/sei-cosmos/blob/a4eb451c957b1ca7ca9118406682f93fe83d1f61/types/errors/errors.go#L32 + Fatal: regexp.MustCompile(`(: |^)'*out of gas|insufficient fee|Tx too large. Max size is \d+, but got \d+|: insufficient funds`), +} + const TerminallyStuckMsg = "transaction terminally stuck" // Tx.Error messages that are set internally so they are not chain or client specific @@ -291,7 +301,7 @@ var internal = ClientErrors{ TerminallyStuck: regexp.MustCompile(TerminallyStuckMsg), } -var clients = []ClientErrors{parity, geth, arbitrum, metis, substrate, avalanche, nethermind, harmony, besu, erigon, klaytn, celo, zkSync, zkEvm, treasure, mantle, aStar, hedera, gnosis, internal} +var clients = []ClientErrors{parity, geth, arbitrum, metis, substrate, avalanche, nethermind, harmony, besu, erigon, klaytn, celo, zkSync, zkEvm, treasure, mantle, aStar, hedera, gnosis, sei, internal} // ClientErrorRegexes returns a map of compiled regexes for each error type func ClientErrorRegexes(errsRegex config.ClientErrors) *ClientErrors { diff --git a/core/chains/evm/client/errors_test.go b/core/chains/evm/client/errors_test.go index 1f9aaa53365..7ba042ab5c6 100644 --- a/core/chains/evm/client/errors_test.go +++ b/core/chains/evm/client/errors_test.go @@ -143,6 +143,7 @@ func Test_Eth_Errors(t *testing.T) { {"ErrorObject { code: ServerError(3), message: \\\"known transaction. 
transaction with hash 0xf016…ad63 is already in the system\\\", data: Some(RawValue(\\\"0x\\\")) }", true, "zkSync"}, {"client error transaction already in mempool", true, "tomlConfig"}, {"alreadyknown", true, "Gnosis"}, + {"tx already exists in cache", true, "Sei"}, } for _, test := range tests { err = evmclient.NewSendErrorS(test.message) @@ -442,6 +443,11 @@ func Test_Eth_Errors_Fatal(t *testing.T) { {"client error fatal", true, "tomlConfig"}, {"[Request ID: d9711488-4c1e-4af2-bc1f-7969913d7b60] Error invoking RPC: transaction 0.0.4425573@1718213476.914320044 failed precheck with status INVALID_SIGNATURE", true, "hedera"}, {"invalid chain id for signer", true, "Treasure"}, + + {": out of gas", true, "Sei"}, + {"Tx too large. Max size is 2048576, but got 2097431", true, "Sei"}, + {": insufficient funds", true, "Sei"}, + {"insufficient fee", true, "Sei"}, } for _, test := range tests { diff --git a/core/chains/evm/client/helpers_test.go b/core/chains/evm/client/helpers_test.go index f9751be765c..6369c9dca12 100644 --- a/core/chains/evm/client/helpers_test.go +++ b/core/chains/evm/client/helpers_test.go @@ -4,6 +4,7 @@ import ( "fmt" "math/big" "net/url" + "sync" "testing" "time" @@ -216,6 +217,7 @@ const HeadResult = `{"difficulty":"0xf3a00","extraData":"0xd88301050384676574688 type mockSubscription struct { unsubscribed bool Errors chan error + unsub sync.Once } func NewMockSubscription() *mockSubscription { @@ -225,8 +227,10 @@ func NewMockSubscription() *mockSubscription { func (mes *mockSubscription) Err() <-chan error { return mes.Errors } func (mes *mockSubscription) Unsubscribe() { - mes.unsubscribed = true - close(mes.Errors) + mes.unsub.Do(func() { + mes.unsubscribed = true + close(mes.Errors) + }) } func ParseTestNodeConfigs(nodes []NodeConfig) ([]*toml.Node, error) { diff --git a/core/chains/evm/client/rpc_client.go b/core/chains/evm/client/rpc_client.go index 97046b4eff2..35d2a6dcd6b 100644 --- a/core/chains/evm/client/rpc_client.go +++ b/core/chains/evm/client/rpc_client.go @@ -5,6 +5,7 @@ import ( "encoding/json" "errors" "fmt" + "math" "math/big" "net/url" "strconv" @@ -376,6 +377,10 @@ func (r *RPCClient) BatchCallContext(rootCtx context.Context, b []rpc.BatchElem) var requestedFinalizedBlock bool if r.chainType == chaintype.ChainAstar { for _, el := range b { + if el.Method == "eth_getLogs" { + r.rpcLog.Critical("evmclient.BatchCallContext: eth_getLogs is not supported") + return errors.New("evmclient.BatchCallContext: eth_getLogs is not supported") + } if !isRequestingFinalizedBlock(el) { continue } @@ -490,10 +495,10 @@ func (r *RPCClient) SubscribeToHeads(ctx context.Context) (ch <-chan *evmtypes.H }() channel := make(chan *evmtypes.Head) - forwarder := newSubForwarder(channel, func(head *evmtypes.Head) *evmtypes.Head { + forwarder := newSubForwarder(channel, func(head *evmtypes.Head) (*evmtypes.Head, error) { head.EVMChainID = ubig.New(r.chainID) r.onNewHead(ctx, chStopInFlight, head) - return head + return head, nil }, r.wrapRPCClientError) err = forwarder.start(ws.rpc.EthSubscribe(ctx, forwarder.srcCh, args...)) @@ -1199,8 +1204,11 @@ func (r *RPCClient) FilterLogs(ctx context.Context, q ethereum.FilterQuery) (l [ l, err = ws.geth.FilterLogs(ctx, q) err = r.wrapWS(err) } - duration := time.Since(start) + if err == nil { + err = r.makeLogsValid(l) + } + duration := time.Since(start) r.logResult(lggr, err, duration, r.getRPCDomain(), "FilterLogs", "log", l, ) @@ -1228,7 +1236,7 @@ func (r *RPCClient) SubscribeFilterLogs(ctx context.Context, q ethereum.FilterQu 
r.logResult(lggr, err, duration, r.getRPCDomain(), "SubscribeFilterLogs") err = r.wrapWS(err) }() - sub := newSubForwarder(ch, nil, r.wrapRPCClientError) + sub := newSubForwarder(ch, r.makeLogValid, r.wrapRPCClientError) err = sub.start(ws.geth.SubscribeFilterLogs(ctx, q, sub.srcCh)) if err != nil { return @@ -1452,3 +1460,38 @@ func ToBlockNumArg(number *big.Int) string { } return hexutil.EncodeBig(number) } + +func (r *RPCClient) makeLogsValid(logs []types.Log) error { + if r.chainType != chaintype.ChainSei { + return nil + } + + for i := range logs { + var err error + logs[i], err = r.makeLogValid(logs[i]) + if err != nil { + return err + } + } + + return nil +} + +func (r *RPCClient) makeLogValid(log types.Log) (types.Log, error) { + if r.chainType != chaintype.ChainSei { + return log, nil + } + + if log.TxIndex > math.MaxUint32 { + return types.Log{}, fmt.Errorf("TxIndex of tx %s exceeds max supported value of %d", log.TxHash, math.MaxUint32) + } + + if log.Index > math.MaxUint32 { + return types.Log{}, fmt.Errorf("log's index %d of tx %s exceeds max supported value of %d", log.Index, log.TxHash, math.MaxUint32) + } + + // it's safe as we have a build guard to guarantee 64-bit system + newIndex := uint64(log.TxIndex<<32) | uint64(log.Index) + log.Index = uint(newIndex) + return log, nil +} diff --git a/core/chains/evm/client/rpc_client_internal_test.go b/core/chains/evm/client/rpc_client_internal_test.go new file mode 100644 index 00000000000..ef321645fc2 --- /dev/null +++ b/core/chains/evm/client/rpc_client_internal_test.go @@ -0,0 +1,93 @@ +package client + +import ( + "errors" + "math" + "testing" + + ethtypes "github.com/ethereum/go-ethereum/core/types" + "github.com/stretchr/testify/require" + + commonclient "github.com/smartcontractkit/chainlink/v2/common/client" + "github.com/smartcontractkit/chainlink/v2/core/chains/evm/config/chaintype" + "github.com/smartcontractkit/chainlink/v2/core/logger" +) + +func TestRPCClient_MakeLogsValid(t *testing.T) { + testCases := []struct { + Name string + TxIndex uint + LogIndex uint + ExpectedLogIndex uint + ExpectedError error + }{ + { + Name: "TxIndex = 0 LogIndex = 0", + TxIndex: 0, + LogIndex: 0, + ExpectedLogIndex: 0, + ExpectedError: nil, + }, + { + Name: "TxIndex = 0 LogIndex = 1", + TxIndex: 0, + LogIndex: 1, + ExpectedLogIndex: 1, + ExpectedError: nil, + }, + { + Name: "TxIndex = 0 LogIndex = MaxUint32", + TxIndex: 0, + LogIndex: math.MaxUint32, + ExpectedLogIndex: math.MaxUint32, + ExpectedError: nil, + }, + { + Name: "LogIndex = MaxUint32 + 1 => returns an error", + TxIndex: 0, + LogIndex: math.MaxUint32 + 1, + ExpectedLogIndex: 0, + ExpectedError: errors.New("log's index 4294967296 of tx 0x0000000000000000000000000000000000000000000000000000000000000000 exceeds max supported value of 4294967295"), + }, + { + Name: "TxIndex = 1 LogIndex = 0", + TxIndex: 1, + LogIndex: 0, + ExpectedLogIndex: math.MaxUint32 + 1, + ExpectedError: nil, + }, + { + Name: "TxIndex = MaxUint32 LogIndex = MaxUint32", + TxIndex: math.MaxUint32, + LogIndex: math.MaxUint32, + ExpectedLogIndex: math.MaxUint64, + ExpectedError: nil, + }, + { + Name: "TxIndex = MaxUint32 + 1 => returns an error", + TxIndex: math.MaxUint32 + 1, + LogIndex: 0, + ExpectedLogIndex: 0, + ExpectedError: errors.New("TxIndex of tx 0x0000000000000000000000000000000000000000000000000000000000000000 exceeds max supported value of 4294967295"), + }, + } + for _, tc := range testCases { + t.Run(tc.Name, func(t *testing.T) { + rpc := NewRPCClient(TestNodePoolConfig{}, logger.TestLogger(t), 
nil, nil, "eth-primary-rpc-0", 0, nil, commonclient.Primary, commonclient.QueryTimeout, commonclient.QueryTimeout, "") + log, err := rpc.makeLogValid(ethtypes.Log{TxIndex: tc.TxIndex, Index: tc.LogIndex}) + // non sei should return as is + require.NoError(t, err) + require.Equal(t, tc.TxIndex, log.TxIndex) + require.Equal(t, tc.LogIndex, log.Index) + seiRPC := NewRPCClient(TestNodePoolConfig{}, logger.TestLogger(t), nil, nil, "eth-primary-rpc-0", 0, nil, commonclient.Primary, commonclient.QueryTimeout, commonclient.QueryTimeout, chaintype.ChainSei) + log, err = seiRPC.makeLogValid(ethtypes.Log{TxIndex: tc.TxIndex, Index: tc.LogIndex}) + if tc.ExpectedError != nil { + require.EqualError(t, err, tc.ExpectedError.Error()) + return + } + + require.Equal(t, tc.ExpectedLogIndex, log.Index) + require.Equal(t, tc.TxIndex, log.TxIndex) + }) + } +} diff --git a/core/chains/evm/client/rpc_client_test.go b/core/chains/evm/client/rpc_client_test.go index d5286e9acf0..f6e7f9ee338 100644 --- a/core/chains/evm/client/rpc_client_test.go +++ b/core/chains/evm/client/rpc_client_test.go @@ -5,6 +5,7 @@ import ( "encoding/json" "errors" "fmt" + "math" "math/big" "net/url" "sync" @@ -13,6 +14,7 @@ import ( "time" "github.com/ethereum/go-ethereum" + "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/rpc" "github.com/stretchr/testify/assert" @@ -31,14 +33,16 @@ import ( evmtypes "github.com/smartcontractkit/chainlink/v2/core/chains/evm/types" ) -func makeNewHeadWSMessage(head *evmtypes.Head) string { - asJSON, err := json.Marshal(head) +func makeNewWSMessage[T any](v T) string { + asJSON, err := json.Marshal(v) if err != nil { panic(fmt.Errorf("failed to marshal head: %w", err)) } return fmt.Sprintf(`{"jsonrpc":"2.0","method":"eth_subscription","params":{"subscription":"0x00","result":%s}}`, string(asJSON)) } +var makeNewHeadWSMessage = makeNewWSMessage[*evmtypes.Head] + func TestRPCClient_SubscribeToHeads(t *testing.T) { t.Parallel() ctx, cancel := context.WithTimeout(tests.Context(t), tests.WaitTimeout(t)) @@ -385,6 +389,135 @@ func TestRPCClient_SubscribeFilterLogs(t *testing.T) { t.Errorf("Expected subscription to return an error, but test timeout instead") } }) + t.Run("Log's index is properly set for Sei chain type", func(t *testing.T) { + server := testutils.NewWSServer(t, chainId, func(method string, params gjson.Result) (resp testutils.JSONRPCResponse) { + if method == "eth_unsubscribe" { + resp.Result = "true" + return + } else if method == "eth_subscribe" { + if assert.True(t, params.IsArray()) && assert.Equal(t, "logs", params.Array()[0].String()) { + resp.Result = `"0x00"` + } + return + } + return + }) + wsURL := server.WSURL() + rpc := client.NewRPCClient(nodePoolCfg, lggr, wsURL, nil, "rpc", 1, chainId, commonclient.Primary, commonclient.QueryTimeout, commonclient.QueryTimeout, chaintype.ChainSei) + defer rpc.Close() + require.NoError(t, rpc.Dial(ctx)) + ch := make(chan types.Log) + sub, err := rpc.SubscribeFilterLogs(ctx, ethereum.FilterQuery{}, ch) + require.NoError(t, err) + testCases := []struct { + TxIndex uint + Index uint + ExpectedIndex uint + }{ + { + TxIndex: 0, + Index: 0, + ExpectedIndex: 0, + }, + { + TxIndex: 0, + Index: 1, + ExpectedIndex: 1, + }, + { + TxIndex: 1, + Index: 0, + ExpectedIndex: math.MaxUint32 + 1, + }, + } + go func() { + for _, testCase := range testCases { + server.MustWriteBinaryMessageSync(t, makeNewWSMessage(types.Log{TxIndex: testCase.TxIndex, Index: testCase.Index, Topics: []common.Hash{{}}})) 
+ } + }() + defer sub.Unsubscribe() + for _, testCase := range testCases { + select { + case <-tests.Context(t).Done(): + require.Fail(t, "context timed out") + case err := <-sub.Err(): + require.NoError(t, err) + require.Fail(t, "Did not expect error channel to be closed or return error before all testcases were consumed") + case log := <-ch: + require.Equal(t, testCase.ExpectedIndex, log.Index, "Unexpected log index %d for test case %v", log.Index, testCase) + } + } + }) +} + +func TestRPCClientFilterLogs(t *testing.T) { + t.Parallel() + + nodePoolCfg := client.TestNodePoolConfig{ + NodeNewHeadsPollInterval: 1 * time.Second, + NodeFinalizedBlockPollInterval: 1 * time.Second, + } + + chainID := big.NewInt(123456) + lggr := logger.Test(t) + ctx, cancel := context.WithTimeout(tests.Context(t), tests.WaitTimeout(t)) + defer cancel() + t.Run("Log's index is properly set for Sei chain type", func(t *testing.T) { + testCases := []struct { + TxIndex uint + Index uint + ExpectedIndex uint + }{ + { + TxIndex: 0, + Index: 0, + ExpectedIndex: 0, + }, + { + TxIndex: 0, + Index: 1, + ExpectedIndex: 1, + }, + { + TxIndex: 1, + Index: 0, + ExpectedIndex: math.MaxUint32 + 1, + }, + } + server := testutils.NewWSServer(t, chainID, func(method string, params gjson.Result) (resp testutils.JSONRPCResponse) { + if method != "eth_getLogs" { + return + } + var logs []types.Log + for _, testCase := range testCases { + logs = append(logs, types.Log{TxIndex: testCase.TxIndex, Index: testCase.Index, Topics: []common.Hash{{}}}) + } + raw, err := json.Marshal(logs) + require.NoError(t, err) + resp.Result = string(raw) + return + }) + wsURL := server.WSURL() + seiRPC := client.NewRPCClient(nodePoolCfg, lggr, wsURL, nil, "rpc", 1, chainID, commonclient.Primary, commonclient.QueryTimeout, commonclient.QueryTimeout, chaintype.ChainSei) + defer seiRPC.Close() + require.NoError(t, seiRPC.Dial(ctx)) + logs, err := seiRPC.FilterLogs(ctx, ethereum.FilterQuery{}) + require.NoError(t, err) + for i, testCase := range testCases { + require.Equal(t, testCase.ExpectedIndex, logs[i].Index, "Unexpected log index %d for test case %v", logs[i].Index, testCase) + } + + // non sei should return index as is + rpc := client.NewRPCClient(nodePoolCfg, lggr, wsURL, nil, "rpc", 1, chainID, commonclient.Primary, commonclient.QueryTimeout, commonclient.QueryTimeout, "") + defer rpc.Close() + require.NoError(t, rpc.Dial(ctx)) + logs, err = rpc.FilterLogs(ctx, ethereum.FilterQuery{}) + require.NoError(t, err) + for i, testCase := range testCases { + require.Equal(t, testCase.Index, logs[i].Index, "Expected non sei log to be returned as is") + require.Equal(t, testCase.TxIndex, logs[i].TxIndex, "Expected non sei log to be returned as is") + } + }) } func TestRPCClient_LatestFinalizedBlock(t *testing.T) { diff --git a/core/chains/evm/client/sub_forwarder.go b/core/chains/evm/client/sub_forwarder.go index 93e9b106b4a..a9b5a97eee0 100644 --- a/core/chains/evm/client/sub_forwarder.go +++ b/core/chains/evm/client/sub_forwarder.go @@ -13,7 +13,7 @@ type subForwarder[T any] struct { srcCh chan T srcSub ethereum.Subscription - interceptResult func(T) T + interceptResult func(T) (T, error) interceptError func(error) error done chan struct{} @@ -21,14 +21,14 @@ type subForwarder[T any] struct { unSub chan struct{} } -func newSubForwarder[T any](destCh chan<- T, interceptResult func(T) T, interceptError func(error) error) *subForwarder[T] { +func newSubForwarder[T any](destCh chan<- T, interceptResult func(T) (T, error), interceptError func(error) error) 
*subForwarder[T] { return &subForwarder[T]{ interceptResult: interceptResult, interceptError: interceptError, destCh: destCh, srcCh: make(chan T), done: make(chan struct{}), - err: make(chan error), + err: make(chan error, 1), unSub: make(chan struct{}, 1), } } @@ -44,6 +44,14 @@ func (c *subForwarder[T]) start(sub ethereum.Subscription, err error) error { return nil } +func (c *subForwarder[T]) handleError(err error) { + if c.interceptError != nil { + err = c.interceptError(err) + } + c.err <- err // err is buffered, and we never write twice, so write is not blocking + c.srcSub.Unsubscribe() +} + // forwardLoop receives from src, adds the chainID, and then sends to dest. // It also handles Unsubscribing, which may interrupt either forwarding operation. func (c *subForwarder[T]) forwardLoop() { @@ -54,19 +62,17 @@ func (c *subForwarder[T]) forwardLoop() { for { select { case err := <-c.srcSub.Err(): - if c.interceptError != nil { - err = c.interceptError(err) - } - select { - case c.err <- err: - case <-c.unSub: - c.srcSub.Unsubscribe() - } + c.handleError(err) return case h := <-c.srcCh: if c.interceptResult != nil { - h = c.interceptResult(h) + var err error + h, err = c.interceptResult(h) + if err != nil { + c.handleError(err) + return + } } select { case c.destCh <- h: diff --git a/core/chains/evm/client/sub_forwarder_test.go b/core/chains/evm/client/sub_forwarder_test.go index 1bc0122603b..267fa1b8467 100644 --- a/core/chains/evm/client/sub_forwarder_test.go +++ b/core/chains/evm/client/sub_forwarder_test.go @@ -21,9 +21,9 @@ func TestChainIDSubForwarder(t *testing.T) { t.Parallel() newChainIDSubForwarder := func(chainID *big.Int, ch chan<- *evmtypes.Head) *subForwarder[*evmtypes.Head] { - return newSubForwarder(ch, func(head *evmtypes.Head) *evmtypes.Head { + return newSubForwarder(ch, func(head *evmtypes.Head) (*evmtypes.Head, error) { head.EVMChainID = ubig.New(chainID) - return head + return head, nil }, nil) } @@ -54,12 +54,14 @@ func TestChainIDSubForwarder(t *testing.T) { sub := NewMockSubscription() err := forwarder.start(sub, nil) assert.NoError(t, err) - sub.Errors <- errors.New("boo") + expectedError := errors.New("boo") + sub.Errors <- expectedError forwarder.Unsubscribe() assert.True(t, sub.unsubscribed) - _, ok := <-sub.Err() - assert.False(t, ok) + err, ok := <-forwarder.Err() + assert.True(t, ok) + require.ErrorIs(t, err, expectedError) _, ok = <-forwarder.Err() assert.False(t, ok) }) @@ -117,6 +119,31 @@ func TestChainIDSubForwarder(t *testing.T) { }) } +func TestSubscriptionForwarder(t *testing.T) { + t.Run("Error returned by interceptResult is forwarded to err channel", func(t *testing.T) { + t.Parallel() + + ch := make(chan *evmtypes.Head) + expectedErr := errors.New("something went wrong during result interception") + forwarder := newSubForwarder(ch, func(head *evmtypes.Head) (*evmtypes.Head, error) { + return nil, expectedErr + }, nil) + mockedSub := NewMockSubscription() + require.NoError(t, forwarder.start(mockedSub, nil)) + + head := &evmtypes.Head{ + ID: 1, + } + forwarder.srcCh <- head + err := <-forwarder.Err() + require.ErrorIs(t, err, expectedErr) + // ensure forwarder is closed + _, ok := <-forwarder.Err() + assert.False(t, ok) + assert.True(t, mockedSub.unsubscribed) + }) +} + func TestSubscriptionErrorWrapper(t *testing.T) { t.Parallel() newSubscriptionErrorWrapper := func(t *testing.T, sub commontypes.Subscription, errorPrefix string) ethereum.Subscription { @@ -145,17 +172,6 @@ func TestSubscriptionErrorWrapper(t *testing.T) { // subsequence 
unsubscribe does not causes panic wrapper.Unsubscribe() }) - t.Run("Unsubscribe interrupts error delivery", func(t *testing.T) { - t.Parallel() - sub := NewMockSubscription() - const prefix = "RPC returned error" - wrapper := newSubscriptionErrorWrapper(t, sub, prefix) - sub.Errors <- fmt.Errorf("error") - - wrapper.Unsubscribe() - _, ok := <-wrapper.Err() - assert.False(t, ok) - }) t.Run("Successfully wraps error", func(t *testing.T) { t.Parallel() sub := NewMockSubscription() diff --git a/core/chains/evm/config/chaintype/chaintype.go b/core/chains/evm/config/chaintype/chaintype.go index b2eff02834b..be3afa0ea62 100644 --- a/core/chains/evm/config/chaintype/chaintype.go +++ b/core/chains/evm/config/chaintype/chaintype.go @@ -17,6 +17,7 @@ const ( ChainMantle ChainType = "mantle" ChainMetis ChainType = "metis" ChainOptimismBedrock ChainType = "optimismBedrock" + ChainSei ChainType = "sei" ChainScroll ChainType = "scroll" ChainWeMix ChainType = "wemix" ChainXLayer ChainType = "xlayer" @@ -39,7 +40,7 @@ func (c ChainType) IsL2() bool { func (c ChainType) IsValid() bool { switch c { - case "", ChainArbitrum, ChainAstar, ChainCelo, ChainGnosis, ChainHedera, ChainKroma, ChainMantle, ChainMetis, ChainOptimismBedrock, ChainScroll, ChainWeMix, ChainXLayer, ChainZkEvm, ChainZkSync, ChainZircuit: + case "", ChainArbitrum, ChainAstar, ChainCelo, ChainGnosis, ChainHedera, ChainKroma, ChainMantle, ChainMetis, ChainOptimismBedrock, ChainSei, ChainScroll, ChainWeMix, ChainXLayer, ChainZkEvm, ChainZkSync, ChainZircuit: return true } return false @@ -65,6 +66,8 @@ func FromSlug(slug string) ChainType { return ChainMetis case "optimismBedrock": return ChainOptimismBedrock + case "sei": + return ChainSei case "scroll": return ChainScroll case "wemix": @@ -138,6 +141,7 @@ var ErrInvalid = fmt.Errorf("must be one of %s or omitted", strings.Join([]strin string(ChainMantle), string(ChainMetis), string(ChainOptimismBedrock), + string(ChainSei), string(ChainScroll), string(ChainWeMix), string(ChainXLayer), diff --git a/core/services/chainlink/config_test.go b/core/services/chainlink/config_test.go index 65ece5a88c0..9a08b356c66 100644 --- a/core/services/chainlink/config_test.go +++ b/core/services/chainlink/config_test.go @@ -1472,7 +1472,7 @@ func TestConfig_Validate(t *testing.T) { - 1: 10 errors: - ChainType: invalid value (Foo): must not be set with this chain id - Nodes: missing: must have at least one node - - ChainType: invalid value (Foo): must be one of arbitrum, astar, celo, gnosis, hedera, kroma, mantle, metis, optimismBedrock, scroll, wemix, xlayer, zkevm, zksync, zircuit or omitted + - ChainType: invalid value (Foo): must be one of arbitrum, astar, celo, gnosis, hedera, kroma, mantle, metis, optimismBedrock, sei, scroll, wemix, xlayer, zkevm, zksync, zircuit or omitted - HeadTracker.HistoryDepth: invalid value (30): must be greater than or equal to FinalizedBlockOffset - GasEstimator.BumpThreshold: invalid value (0): cannot be 0 if auto-purge feature is enabled for Foo - Transactions.AutoPurge.Threshold: missing: needs to be set if auto-purge feature is enabled for Foo @@ -1485,7 +1485,7 @@ func TestConfig_Validate(t *testing.T) { - 2: 5 errors: - ChainType: invalid value (Arbitrum): only "optimismBedrock" can be used with this chain id - Nodes: missing: must have at least one node - - ChainType: invalid value (Arbitrum): must be one of arbitrum, astar, celo, gnosis, hedera, kroma, mantle, metis, optimismBedrock, scroll, wemix, xlayer, zkevm, zksync, zircuit or omitted + - ChainType: invalid value 
(Arbitrum): must be one of arbitrum, astar, celo, gnosis, hedera, kroma, mantle, metis, optimismBedrock, sei, scroll, wemix, xlayer, zkevm, zksync, zircuit or omitted - FinalityDepth: invalid value (0): must be greater than or equal to 1 - MinIncomingConfirmations: invalid value (0): must be greater than or equal to 1 - 3: 3 errors: diff --git a/core/services/ocr/contract_tracker.go b/core/services/ocr/contract_tracker.go index 618567f0bdb..f2cf1fee9d3 100644 --- a/core/services/ocr/contract_tracker.go +++ b/core/services/ocr/contract_tracker.go @@ -399,7 +399,7 @@ func (t *OCRContractTracker) LatestBlockHeight(ctx context.Context) (blockheight // care about the block height; we have no way of getting the L1 block // height anyway return 0, nil - case "", chaintype.ChainArbitrum, chaintype.ChainAstar, chaintype.ChainCelo, chaintype.ChainGnosis, chaintype.ChainHedera, chaintype.ChainKroma, chaintype.ChainOptimismBedrock, chaintype.ChainScroll, chaintype.ChainWeMix, chaintype.ChainXLayer, chaintype.ChainZkEvm, chaintype.ChainZkSync, chaintype.ChainZircuit: + case "", chaintype.ChainArbitrum, chaintype.ChainAstar, chaintype.ChainCelo, chaintype.ChainGnosis, chaintype.ChainHedera, chaintype.ChainKroma, chaintype.ChainOptimismBedrock, chaintype.ChainSei, chaintype.ChainScroll, chaintype.ChainWeMix, chaintype.ChainXLayer, chaintype.ChainZkEvm, chaintype.ChainZkSync, chaintype.ChainZircuit: // continue } latestBlockHeight := t.getLatestBlockHeight() diff --git a/core/services/ocrcommon/block_translator.go b/core/services/ocrcommon/block_translator.go index b25d617e2ab..8a755f767b9 100644 --- a/core/services/ocrcommon/block_translator.go +++ b/core/services/ocrcommon/block_translator.go @@ -22,7 +22,7 @@ func NewBlockTranslator(cfg Config, client evmclient.Client, lggr logger.Logger) switch cfg.ChainType() { case chaintype.ChainArbitrum: return NewArbitrumBlockTranslator(client, lggr) - case "", chaintype.ChainCelo, chaintype.ChainGnosis, chaintype.ChainKroma, chaintype.ChainMetis, chaintype.ChainOptimismBedrock, chaintype.ChainScroll, chaintype.ChainWeMix, chaintype.ChainXLayer, chaintype.ChainZkEvm, chaintype.ChainZkSync, chaintype.ChainZircuit: + case "", chaintype.ChainCelo, chaintype.ChainGnosis, chaintype.ChainKroma, chaintype.ChainMetis, chaintype.ChainOptimismBedrock, chaintype.ChainSei, chaintype.ChainScroll, chaintype.ChainWeMix, chaintype.ChainXLayer, chaintype.ChainZkEvm, chaintype.ChainZkSync, chaintype.ChainZircuit: fallthrough default: return &l1BlockTranslator{} From 9bcb3db1fb7eb3ca942ebaa34f3db240bb6f57fd Mon Sep 17 00:00:00 2001 From: Lukasz <120112546+lukaszcl@users.noreply.github.com> Date: Wed, 8 Jan 2025 10:01:32 +0100 Subject: [PATCH 4/8] Flakeguard: Unskip TestChainComponents test (#15851) * Flakeguard: Unskip TestChainComponents test * fail test * Revert "fail test" This reverts commit c4ae8ccb71b01407c8c13c6872c5a28697e21e5f. 
--- .github/workflows/ci-flakeguard.yml | 2 +- .github/workflows/flakeguard-nightly.yml | 2 +- .github/workflows/flakeguard-on-demand.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-flakeguard.yml b/.github/workflows/ci-flakeguard.yml index caf6a62a3fb..1bd60b20c94 100644 --- a/.github/workflows/ci-flakeguard.yml +++ b/.github/workflows/ci-flakeguard.yml @@ -41,7 +41,7 @@ jobs: findByTestFilesDiff: true findByAffectedPackages: false slackNotificationAfterTestsChannelId: 'C07TRF65CNS' #flaky-test-detector-notifications - extraArgs: '{ "skipped_tests": "TestChainComponents", "run_with_race": "true", "print_failed_tests": "true", "test_repeat_count": "3", "omit_test_outputs_on_success": "true" }' + extraArgs: '{ "skipped_tests": "", "run_with_race": "true", "print_failed_tests": "true", "test_repeat_count": "3", "omit_test_outputs_on_success": "true" }' secrets: SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/flakeguard-nightly.yml b/.github/workflows/flakeguard-nightly.yml index 025cca6d0a0..3d62f1f521d 100644 --- a/.github/workflows/flakeguard-nightly.yml +++ b/.github/workflows/flakeguard-nightly.yml @@ -16,7 +16,7 @@ jobs: projectPath: '.' maxPassRatio: '1.0' runAllTests: true - extraArgs: '{ "skipped_tests": "TestChainComponents", "test_repeat_count": "5", "all_tests_runner": "ubuntu22.04-32cores-128GB", "all_tests_runner_count": "3", "run_with_race": "false" }' + extraArgs: '{ "skipped_tests": "", "test_repeat_count": "5", "all_tests_runner": "ubuntu22.04-32cores-128GB", "all_tests_runner_count": "3", "run_with_race": "false" }' slackNotificationAfterTestsChannelId: 'C07TRF65CNS' #flaky-test-detector-notifications secrets: SLACK_BOT_TOKEN: ${{ secrets.QA_SLACK_API_KEY }} diff --git a/.github/workflows/flakeguard-on-demand.yml b/.github/workflows/flakeguard-on-demand.yml index f6df40616f7..a8a71be3ba2 100644 --- a/.github/workflows/flakeguard-on-demand.yml +++ b/.github/workflows/flakeguard-on-demand.yml @@ -48,7 +48,7 @@ on: extraArgs: required: false type: string - default: '{ "skipped_tests": "TestChainComponents", "test_repeat_count": "5", "all_tests_runner": "ubuntu22.04-32cores-128GB", "all_tests_runner_count": "3", "run_with_race": "false" }' + default: '{ "skipped_tests": "", "test_repeat_count": "5", "all_tests_runner": "ubuntu22.04-32cores-128GB", "all_tests_runner_count": "3", "run_with_race": "false" }' description: 'JSON of extra arguments for the workflow.' 
jobs: From fcefd62068d6be4fea1820d1a9edef4e16fbfa3b Mon Sep 17 00:00:00 2001 From: Rens Rooimans Date: Wed, 8 Jan 2025 16:16:15 +0100 Subject: [PATCH 5/8] Remove old Solidity code & move misplaced files (#15852) * rm dead code * move ChainSpecificUtil_v0_8_6.sol to vrf * remove flags * move chainlink client to operatorforwarder * move automation test wrappers to automation * more automation cleanup * move interfaces to operatorforwarder * remove TypeAndVersionInterface.sol in favor of ITypeAndVersion * move and remove mocks * move ChainSpecificUtil to shared * move more testhelpers * move MockV3Aggregator * move logpoller related contracts to shared * clean up broken references * fix broken ref * rebase * fix lint & compile feeds * gen wrapper for ITypeAndVersion * fix lint --- .changeset/cold-coats-battle.md | 5 + .github/CODEOWNERS | 3 - .../workflows/solidity-foundry-artifacts.yml | 1 - contracts/.changeset/angry-needles-approve.md | 5 + contracts/STYLE_GUIDE.md | 3 - contracts/scripts/lcov_prune | 2 - contracts/scripts/native_solc_compile_all | 2 +- .../native_solc_compile_all_automation | 10 +- .../scripts/native_solc_compile_all_feeds | 4 +- .../scripts/native_solc_compile_all_logpoller | 33 -- .../scripts/native_solc_compile_all_shared | 7 +- contracts/src/v0.8/Denominations.sol | 28 -- contracts/src/v0.8/Flags.sol | 124 ----- .../src/v0.8/PermissionedForwardProxy.sol | 65 --- contracts/src/v0.8/ValidatorProxy.sol | 230 --------- .../v0.8/automation/HeartbeatRequester.sol | 4 +- .../src/v0.8/automation/UpkeepTranscoder.sol | 4 +- .../v0.8/automation/dev/MercuryRegistry.sol | 2 +- .../mocks}/MockArbGasInfo.sol | 0 .../{ => automation}/mocks/MockArbSys.sol | 0 .../mocks}/MockGasBoundCaller.sol | 0 .../mocks}/MockZKSyncSystemContext.sol | 0 .../v0.8/automation/test/v2_3/BaseTest.t.sol | 2 +- .../test/v2_3_zksync/BaseTest.t.sol | 6 +- .../AutomationConsumerBenchmark.sol | 0 .../testhelpers}/CronReceiver.sol | 0 .../ERC20BalanceMonitorExposed.sol | 2 +- .../testhelpers}/EthBalanceMonitorExposed.sol | 2 +- .../KeeperCompatibleTestHelper.sol | 2 +- .../testhelpers}/MockOVMGasPriceOracle.sol | 0 .../testhelpers}/ReceiveEmitter.sol | 0 .../testhelpers}/ReceiveFallbackEmitter.sol | 0 .../testhelpers}/ReceiveReverter.sol | 0 .../testhelpers}/StreamsLookupUpkeep.sol | 6 +- .../testhelpers}/VerifiableLoadBase.sol | 10 +- .../VerifiableLoadLogTriggerUpkeep.sol | 4 +- .../VerifiableLoadStreamsLookupUpkeep.sol | 2 +- .../testhelpers}/VerifiableLoadUpkeep.sol | 0 .../automation/v1_2/KeeperRegistrar1_2.sol | 4 +- .../automation/v1_2/KeeperRegistry1_2.sol | 4 +- .../automation/v1_3/KeeperRegistry1_3.sol | 4 +- .../automation/v2_0/KeeperRegistrar2_0.sol | 4 +- .../automation/v2_0/UpkeepTranscoder3_0.sol | 4 +- .../v2_1/AutomationRegistrar2_1.sol | 4 +- .../automation/v2_1/UpkeepTranscoder4_0.sol | 4 +- .../v2_3/AutomationRegistrar2_3.sol | 4 +- .../automation/v2_3/UpkeepTranscoder5_0.sol | 4 +- .../feeQuoter/FeeQuoter.getTokenPrice.t.sol | 2 +- .../FeeQuoter.getValidatedTokenPrice.t.sol | 2 +- .../ccip/test/feeQuoter/FeeQuoterSetup.t.sol | 2 +- .../src/v0.8/functions/tests/v1_X/Setup.t.sol | 4 +- .../tests/v1_X/testhelpers}/MockLinkToken.sol | 2 +- .../v0.8/interfaces/FeedRegistryInterface.sol | 124 ----- .../src/v0.8/interfaces/FlagsInterface.sol | 17 - .../src/v0.8/interfaces/PoRAddressList.sol | 29 -- .../interfaces/TypeAndVersionInterface.sol | 6 - .../{tests => l2ep/test}/FeedConsumer.sol | 3 +- .../src/v0.8/{tests => l2ep/test}/Greeter.sol | 3 +- .../test/mocks}/MockArbitrumInbox.sol | 4 +- 
.../MockOptimismL1CrossDomainMessenger.sol | 0 .../MockOptimismL2CrossDomainMessenger.sol | 0 .../src/v0.8/l2ep/test/v1_0_0/L2EPTest.t.sol | 2 +- .../ArbitrumCrossDomainForwarder.t.sol | 2 +- .../ArbitrumCrossDomainGovernor.t.sol | 2 +- .../ArbitrumSequencerUptimeFeed.t.sol | 2 +- .../v1_0_0/arbitrum/ArbitrumValidator.t.sol | 2 +- .../OptimismCrossDomainForwarder.t.sol | 2 +- .../OptimismCrossDomainGovernor.t.sol | 2 +- .../OptimismSequencerUptimeFeed.t.sol | 4 +- .../v1_0_0/optimism/OptimismValidator.t.sol | 4 +- .../scroll/ScrollCrossDomainForwarder.t.sol | 2 +- .../scroll/ScrollCrossDomainGovernor.t.sol | 2 +- .../shared/BaseSequencerUptimeFeed.t.sol | 2 +- .../src/v0.8/llo-feeds/v0.3.0/FeeManager.sol | 6 +- .../v0.8/llo-feeds/v0.3.0/RewardManager.sol | 6 +- .../src/v0.8/llo-feeds/v0.3.0/Verifier.sol | 6 +- .../v0.8/llo-feeds/v0.3.0/VerifierProxy.sol | 6 +- .../v0.4.0/DestinationFeeManager.sol | 6 +- .../v0.4.0/DestinationRewardManager.sol | 6 +- .../llo-feeds/v0.4.0/DestinationVerifier.sol | 6 +- .../v0.4.0/DestinationVerifierProxy.sol | 6 +- .../configuration/ChannelConfigStore.sol | 4 +- .../v0.5.0/configuration/Configurator.sol | 6 +- .../v0.8/mocks/MockAggregatorValidator.sol | 30 -- .../src/v0.8/mocks/MockOffchainAggregator.sol | 14 - .../{ => operatorforwarder}/Chainlink.sol | 4 +- .../ChainlinkClient.sol | 4 +- .../src/v0.8/operatorforwarder/Operator.sol | 4 +- .../interfaces/ChainlinkRequestInterface.sol | 0 .../interfaces/ENSInterface.sol | 0 .../interfaces/OperatorInterface.sol | 0 .../interfaces/OracleInterface.sol | 0 .../interfaces/PointerInterface.sol | 0 .../test}/Broken.sol | 1 + .../testhelpers/ChainlinkClientHelper.sol | 2 +- .../test/testhelpers/Chainlinked.sol | 2 +- .../test/testhelpers/Consumer.sol | 4 +- .../test/testhelpers/EmptyOracle.sol | 4 +- .../test/testhelpers/GasGuzzlingConsumer.sol | 2 +- .../MaliciousMultiWordConsumer.sol | 4 +- .../test/testhelpers/MaliciousRequester.sol | 2 +- .../test/testhelpers/MultiWordConsumer.sol | 4 +- .../mocks}/MockV3Aggregator.sol | 3 +- .../test/helpers}/LogEmitter.sol | 1 + .../test/helpers}/VRFLogEmitter.sol | 0 .../{ => shared/util}/ChainSpecificUtil.sol | 6 +- .../v0.8/tests/ChainlinkClientTestHelper.sol | 83 ---- .../src/v0.8/tests/ChainlinkTestHelper.sol | 57 --- contracts/src/v0.8/tests/Counter.sol | 26 - contracts/src/v0.8/tests/FlagsTestHelper.sol | 20 - .../src/v0.8/tests/MockETHLINKAggregator.sol | 44 -- .../src/v0.8/vrf/BatchBlockhashStore.sol | 2 +- .../{ => vrf}/ChainSpecificUtil_v0_8_6.sol | 6 +- contracts/src/v0.8/vrf/VRFCoordinatorV2.sol | 6 +- contracts/src/v0.8/vrf/VRFV2Wrapper.sol | 6 +- contracts/src/v0.8/vrf/dev/BlockhashStore.sol | 2 +- .../v0.8/vrf/dev/TrustedBlockhashStore.sol | 2 +- .../src/v0.8/vrf/dev/VRFV2PlusWrapper.sol | 4 +- .../testhelpers/VRFCoordinatorTestV2_5.sol | 2 +- .../VRFCoordinatorV2PlusUpgradedVersion.sol | 2 +- .../VRFV2PlusLoadTestWithMetrics.sol | 2 +- .../VRFV2PlusWrapperLoadTestConsumer.sol | 2 +- .../src/v0.8/vrf/test/ChainSpecificUtil.t.sol | 2 +- .../vrf/test/FixtureVRFCoordinatorV2_5.t.sol | 4 +- .../v0.8/vrf/test/VRFCoordinatorV2Mock.t.sol | 4 +- .../test/VRFCoordinatorV2Plus_Migration.t.sol | 4 +- .../vrf/test/VRFCoordinatorV2_5Mock.t.sol | 2 +- .../test/VRFCoordinatorV2_5_Arbitrum.t.sol | 4 +- .../test/VRFCoordinatorV2_5_Optimism.t.sol | 4 +- contracts/src/v0.8/vrf/test/VRFV2Plus.t.sol | 4 +- .../vrf/test/VRFV2PlusSubscriptionAPI.t.sol | 4 +- .../src/v0.8/vrf/test/VRFV2PlusWrapper.t.sol | 4 +- .../vrf/test/VRFV2PlusWrapper_Arbitrum.t.sol | 4 +- 
.../vrf/test/VRFV2PlusWrapper_Migration.t.sol | 4 +- .../vrf/test/VRFV2PlusWrapper_Optimism.t.sol | 4 +- .../testhelpers/ChainSpecificUtilHelper.sol | 2 +- .../vrf/testhelpers/VRFCoordinatorTestV2.sol | 10 +- .../testhelpers/VRFV2LoadTestWithMetrics.sol | 2 +- .../testhelpers/VRFV2OwnerTestConsumer.sol | 2 +- .../VRFV2WrapperLoadTestConsumer.sol | 2 +- contracts/test/v0.8/Chainlink.test.ts | 182 ------- contracts/test/v0.8/ChainlinkClient.test.ts | 452 ------------------ contracts/test/v0.8/Flags.test.ts | 405 ---------------- .../test/v0.8/HeartbeatRequester.test.ts | 142 ------ .../v0.8/PermissionedForwardProxy.test.ts | 176 ------- contracts/test/v0.8/ValidatorProxy.test.ts | 403 ---------------- .../automation/AutomationGasAnalysis.test.ts | 2 +- .../automation/AutomationRegistrar2_1.test.ts | 2 +- .../automation/AutomationRegistrar2_3.test.ts | 2 +- .../automation/AutomationRegistry2_2.test.ts | 2 +- .../automation/AutomationRegistry2_3.test.ts | 2 +- .../v0.8/automation/KeeperCompatible.test.ts | 2 +- .../automation/UpkeepTranscoder3_0.test.ts | 2 +- .../automation/UpkeepTranscoder4_0.test.ts | 2 +- .../ZKSyncAutomationRegistry2_3.test.ts | 2 +- .../AuthorizedForwarder.test.ts | 2 +- core/chains/evm/logpoller/helper_test.go | 2 +- .../evm/logpoller/log_poller_internal_test.go | 2 +- core/chains/evm/logpoller/log_poller_test.go | 2 +- core/gethwrappers/abigen_test.go | 2 +- .../type_and_version_interface_wrapper.go | 183 ------- ...rapper-dependency-versions-do-not-edit.txt | 3 - core/gethwrappers/go_generate.go | 1 - core/gethwrappers/go_generate_logpoller.go | 7 - .../generated/log_emitter/log_emitter.go | 2 +- .../type_and_version/type_and_version.go | 183 +++++++ .../vrf_log_emitter/vrf_log_emitter.go | 2 +- ...rapper-dependency-versions-do-not-edit.txt | 5 +- core/gethwrappers/shared/go_generate.go | 5 +- core/services/keeper/registry_interface.go | 8 +- .../plugins/ccip/config/type_and_version.go | 4 +- .../batchreader/token_pool_batch_reader.go | 4 +- .../capabilities/testutils/chain_reader.go | 2 +- .../vrf/v2/listener_v2_log_listener_test.go | 4 +- .../ccip-tests/contracts/contract_deployer.go | 4 +- integration-tests/contracts/test_contracts.go | 2 +- .../automationv2_1/automationv2_1_test.go | 2 +- integration-tests/load/automationv2_1/gun.go | 2 +- .../universal/log_poller/helpers.go | 2 +- tools/ci/ccip_lcov_prune | 3 - 180 files changed, 416 insertions(+), 3111 deletions(-) create mode 100644 .changeset/cold-coats-battle.md create mode 100644 contracts/.changeset/angry-needles-approve.md delete mode 100755 contracts/scripts/native_solc_compile_all_logpoller delete mode 100644 contracts/src/v0.8/Denominations.sol delete mode 100644 contracts/src/v0.8/Flags.sol delete mode 100644 contracts/src/v0.8/PermissionedForwardProxy.sol delete mode 100644 contracts/src/v0.8/ValidatorProxy.sol rename contracts/src/v0.8/{tests => automation/mocks}/MockArbGasInfo.sol (100%) rename contracts/src/v0.8/{ => automation}/mocks/MockArbSys.sol (100%) rename contracts/src/v0.8/{tests => automation/mocks}/MockGasBoundCaller.sol (100%) rename contracts/src/v0.8/{tests => automation/mocks}/MockZKSyncSystemContext.sol (100%) rename contracts/src/v0.8/{tests => automation/testhelpers}/AutomationConsumerBenchmark.sol (100%) rename contracts/src/v0.8/{tests => automation/testhelpers}/CronReceiver.sol (100%) rename contracts/src/v0.8/{tests => automation/testhelpers}/ERC20BalanceMonitorExposed.sol (89%) rename contracts/src/v0.8/{tests => automation/testhelpers}/EthBalanceMonitorExposed.sol (88%) 
rename contracts/src/v0.8/{tests => automation/testhelpers}/KeeperCompatibleTestHelper.sol (88%) rename contracts/src/v0.8/{tests => automation/testhelpers}/MockOVMGasPriceOracle.sol (100%) rename contracts/src/v0.8/{tests => automation/testhelpers}/ReceiveEmitter.sol (100%) rename contracts/src/v0.8/{tests => automation/testhelpers}/ReceiveFallbackEmitter.sol (100%) rename contracts/src/v0.8/{tests => automation/testhelpers}/ReceiveReverter.sol (100%) rename contracts/src/v0.8/{tests => automation/testhelpers}/StreamsLookupUpkeep.sol (95%) rename contracts/src/v0.8/{tests => automation/testhelpers}/VerifiableLoadBase.sol (98%) rename contracts/src/v0.8/{tests => automation/testhelpers}/VerifiableLoadLogTriggerUpkeep.sol (97%) rename contracts/src/v0.8/{tests => automation/testhelpers}/VerifiableLoadStreamsLookupUpkeep.sol (97%) rename contracts/src/v0.8/{tests => automation/testhelpers}/VerifiableLoadUpkeep.sol (100%) rename contracts/src/v0.8/{mocks => functions/tests/v1_X/testhelpers}/MockLinkToken.sol (94%) delete mode 100644 contracts/src/v0.8/interfaces/FeedRegistryInterface.sol delete mode 100644 contracts/src/v0.8/interfaces/FlagsInterface.sol delete mode 100644 contracts/src/v0.8/interfaces/PoRAddressList.sol delete mode 100644 contracts/src/v0.8/interfaces/TypeAndVersionInterface.sol rename contracts/src/v0.8/{tests => l2ep/test}/FeedConsumer.sol (92%) rename contracts/src/v0.8/{tests => l2ep/test}/Greeter.sol (82%) rename contracts/src/v0.8/{tests => l2ep/test/mocks}/MockArbitrumInbox.sol (94%) rename contracts/src/v0.8/{tests => l2ep/test/mocks}/MockOptimismL1CrossDomainMessenger.sol (100%) rename contracts/src/v0.8/{tests => l2ep/test/mocks}/MockOptimismL2CrossDomainMessenger.sol (100%) delete mode 100644 contracts/src/v0.8/mocks/MockAggregatorValidator.sol delete mode 100644 contracts/src/v0.8/mocks/MockOffchainAggregator.sol rename contracts/src/v0.8/{ => operatorforwarder}/Chainlink.sol (96%) rename contracts/src/v0.8/{ => operatorforwarder}/ChainlinkClient.sol (98%) rename contracts/src/v0.8/{ => operatorforwarder}/interfaces/ChainlinkRequestInterface.sol (100%) rename contracts/src/v0.8/{ => operatorforwarder}/interfaces/ENSInterface.sol (100%) rename contracts/src/v0.8/{ => operatorforwarder}/interfaces/OperatorInterface.sol (100%) rename contracts/src/v0.8/{ => operatorforwarder}/interfaces/OracleInterface.sol (100%) rename contracts/src/v0.8/{ => operatorforwarder}/interfaces/PointerInterface.sol (100%) rename contracts/src/v0.8/{tests => operatorforwarder/test}/Broken.sol (95%) rename contracts/src/v0.8/{tests => shared/mocks}/MockV3Aggregator.sol (95%) rename contracts/src/v0.8/{tests => shared/test/helpers}/LogEmitter.sol (97%) rename contracts/src/v0.8/{tests => shared/test/helpers}/VRFLogEmitter.sol (100%) rename contracts/src/v0.8/{ => shared/util}/ChainSpecificUtil.sol (95%) delete mode 100644 contracts/src/v0.8/tests/ChainlinkClientTestHelper.sol delete mode 100644 contracts/src/v0.8/tests/ChainlinkTestHelper.sol delete mode 100644 contracts/src/v0.8/tests/Counter.sol delete mode 100644 contracts/src/v0.8/tests/FlagsTestHelper.sol delete mode 100644 contracts/src/v0.8/tests/MockETHLINKAggregator.sol rename contracts/src/v0.8/{ => vrf}/ChainSpecificUtil_v0_8_6.sol (96%) delete mode 100644 contracts/test/v0.8/Chainlink.test.ts delete mode 100644 contracts/test/v0.8/ChainlinkClient.test.ts delete mode 100644 contracts/test/v0.8/Flags.test.ts delete mode 100644 contracts/test/v0.8/HeartbeatRequester.test.ts delete mode 100644 
contracts/test/v0.8/PermissionedForwardProxy.test.ts delete mode 100644 contracts/test/v0.8/ValidatorProxy.test.ts delete mode 100644 core/gethwrappers/generated/type_and_version_interface_wrapper/type_and_version_interface_wrapper.go delete mode 100644 core/gethwrappers/go_generate_logpoller.go rename core/gethwrappers/{ => shared}/generated/log_emitter/log_emitter.go (93%) create mode 100644 core/gethwrappers/shared/generated/type_and_version/type_and_version.go rename core/gethwrappers/{ => shared}/generated/vrf_log_emitter/vrf_log_emitter.go (88%) diff --git a/.changeset/cold-coats-battle.md b/.changeset/cold-coats-battle.md new file mode 100644 index 00000000000..1a72d025bde --- /dev/null +++ b/.changeset/cold-coats-battle.md @@ -0,0 +1,5 @@ +--- +"chainlink": patch +--- + +#internal minor rename of various gethwrappers diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 9f19d52b7ea..6e05a6f1c10 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -89,14 +89,11 @@ core/scripts/gateway @smartcontractkit/dev-services /contracts/src/v0.8/automation @smartcontractkit/dev-services /contracts/src/v0.8/ccip @smartcontractkit/ccip-onchain /contracts/src/v0.8/functions @smartcontractkit/dev-services -# TODO: interfaces folder, folder should be removed and files moved to the correct folders /contracts/src/v0.8/l2ep @smartcontractkit/bix-build /contracts/src/v0.8/llo-feeds @smartcontractkit/data-streams-engineers # TODO: mocks folder, folder should be removed and files moved to the correct folders /contracts/src/v0.8/operatorforwarder @smartcontractkit/data-feeds-engineers /contracts/src/v0.8/shared @smartcontractkit/core-solidity -# TODO: tests folder, folder should be removed and files moved to the correct folders -# TODO: transmission folder, owner should be found /contracts/src/v0.8/vrf @smartcontractkit/dev-services /contracts/src/v0.8/keystone @smartcontractkit/keystone /contracts/src/v0.8/workflow @smartcontractkit/dev-services diff --git a/.github/workflows/solidity-foundry-artifacts.yml b/.github/workflows/solidity-foundry-artifacts.yml index 620d491e82d..5665c786057 100644 --- a/.github/workflows/solidity-foundry-artifacts.yml +++ b/.github/workflows/solidity-foundry-artifacts.yml @@ -69,7 +69,6 @@ jobs: - '!contracts/src/v0.8/**/*.t.sol' - '!contracts/src/v0.8/*.t.sol' - '!contracts/src/v0.8/**/testhelpers/**' - - '!contracts/src/v0.8/testhelpers/**' - '!contracts/src/v0.8/vendor/**' other_shared: - modified|added: 'contracts/src/v0.8/(interfaces/**/*.sol|*.sol)' diff --git a/contracts/.changeset/angry-needles-approve.md b/contracts/.changeset/angry-needles-approve.md new file mode 100644 index 00000000000..689f2ac6063 --- /dev/null +++ b/contracts/.changeset/angry-needles-approve.md @@ -0,0 +1,5 @@ +--- +'@chainlink/contracts': minor +--- + +#internal Removal and moving of various older Solidity contracts. Unused test helpers are removed, used files are now in their proper product folders diff --git a/contracts/STYLE_GUIDE.md b/contracts/STYLE_GUIDE.md index f1faab09644..1fbdc061f23 100644 --- a/contracts/STYLE_GUIDE.md +++ b/contracts/STYLE_GUIDE.md @@ -265,9 +265,6 @@ All contracts will expose a `typeAndVersion` constant. The string has the following format: `-` with the `-dev` part only being applicable to contracts that have not been fully released. Try to fit it into 32 bytes to keep the impact on contract sizes minimal. -Note that `ITypeAndVersion` should be used, not `TypeAndVersionInterface`. 
- - diff --git a/contracts/scripts/lcov_prune b/contracts/scripts/lcov_prune index 9d5d592c646..9dbd6781d96 100755 --- a/contracts/scripts/lcov_prune +++ b/contracts/scripts/lcov_prune @@ -27,8 +27,6 @@ exclusion_list_ccip=( "src/v0.8/ccip/libraries/USDPriceWith18Decimals.sol" "src/v0.8/ccip/libraries/MerkleMultiProof.sol" "src/v0.8/ccip/libraries/Pool.sol" - "src/v0.8/ConfirmedOwnerWithProposal.sol" - "src/v0.8/tests/MockV3Aggregator.sol" "src/v0.8/ccip/applications/CCIPClientExample.sol" "src/v0.8/keystone/*" ) diff --git a/contracts/scripts/native_solc_compile_all b/contracts/scripts/native_solc_compile_all index 42abac3c6b3..a66456bb6d5 100755 --- a/contracts/scripts/native_solc_compile_all +++ b/contracts/scripts/native_solc_compile_all @@ -12,7 +12,7 @@ python3 -m pip install --require-hashes -r $SCRIPTPATH/requirements.txt # 6 and 7 are legacy contracts, for each other product we have a native_solc_compile_all_$product script # These scripts can be run individually, or all together with this script. # To add new CL products, simply write a native_solc_compile_all_$product script and add it to the list below. -for product in automation events_mock feeds functions keystone llo-feeds logpoller operatorforwarder shared vrf ccip liquiditymanager workflow +for product in automation events_mock feeds functions keystone llo-feeds operatorforwarder shared vrf ccip liquiditymanager workflow do $SCRIPTPATH/native_solc_compile_all_$product done diff --git a/contracts/scripts/native_solc_compile_all_automation b/contracts/scripts/native_solc_compile_all_automation index e189e78cb0f..eb4b39201ba 100755 --- a/contracts/scripts/native_solc_compile_all_automation +++ b/contracts/scripts/native_solc_compile_all_automation @@ -73,11 +73,11 @@ compileContract automation/testhelpers/UpkeepCounter.sol compileContract automation/interfaces/StreamsLookupCompatibleInterface.sol -compileContract tests/VerifiableLoadUpkeep.sol -compileContract tests/VerifiableLoadStreamsLookupUpkeep.sol -compileContract tests/VerifiableLoadLogTriggerUpkeep.sol -compileContract tests/AutomationConsumerBenchmark.sol -compileContract tests/StreamsLookupUpkeep.sol +compileContract automation/testhelpers/VerifiableLoadUpkeep.sol +compileContract automation/testhelpers/VerifiableLoadStreamsLookupUpkeep.sol +compileContract automation/testhelpers/VerifiableLoadLogTriggerUpkeep.sol +compileContract automation/testhelpers/AutomationConsumerBenchmark.sol +compileContract automation/testhelpers/StreamsLookupUpkeep.sol SOLC_VERSION="0.8.19" diff --git a/contracts/scripts/native_solc_compile_all_feeds b/contracts/scripts/native_solc_compile_all_feeds index 66cb3f19161..c6b80958156 100755 --- a/contracts/scripts/native_solc_compile_all_feeds +++ b/contracts/scripts/native_solc_compile_all_feeds @@ -30,5 +30,5 @@ compileContract () { } # Aggregators -compileContract Chainlink.sol -compileContract ChainlinkClient.sol +compileContract operatorforwarder/Chainlink.sol +compileContract operatorforwarder/ChainlinkClient.sol diff --git a/contracts/scripts/native_solc_compile_all_logpoller b/contracts/scripts/native_solc_compile_all_logpoller deleted file mode 100755 index e8ea2a2be80..00000000000 --- a/contracts/scripts/native_solc_compile_all_logpoller +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env bash - -set -e - -echo " ┌──────────────────────────────────────────────┐" -echo " │ Compiling LogPoller contracts... 
│" -echo " └──────────────────────────────────────────────┘" - -SOLC_VERSION="0.8.19" -OPTIMIZE_RUNS=1000000 - - -SCRIPTPATH="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" -python3 -m pip install --require-hashes -r "$SCRIPTPATH"/requirements.txt -solc-select install $SOLC_VERSION -solc-select use $SOLC_VERSION -export SOLC_VERSION=$SOLC_VERSION - -ROOT="$( cd "$(dirname "$0")" >/dev/null 2>&1 ; cd ../../ && pwd -P )" - -compileContract () { - local contract - contract=$(basename "$1" ".sol") - - solc --overwrite --optimize --optimize-runs $OPTIMIZE_RUNS --metadata-hash none \ - -o "$ROOT"/contracts/solc/v$SOLC_VERSION/"$contract" \ - --abi --bin --allow-paths "$ROOT"/contracts/src/v0.8\ - "$ROOT"/contracts/src/v0.8/"$1" -} - - -compileContract tests/LogEmitter.sol -compileContract tests/VRFLogEmitter.sol \ No newline at end of file diff --git a/contracts/scripts/native_solc_compile_all_shared b/contracts/scripts/native_solc_compile_all_shared index d205b51321c..58f24fdaa22 100755 --- a/contracts/scripts/native_solc_compile_all_shared +++ b/contracts/scripts/native_solc_compile_all_shared @@ -33,13 +33,16 @@ compileContract() { $command } +compileContract interfaces/AggregatorV3Interface +compileContract interfaces/ITypeAndVersion compileContract token/ERC677/BurnMintERC677 compileContract token/ERC677/LinkToken compileContract token/ERC20/BurnMintERC20 compileContract test/helpers/ChainReaderTester +compileContract test/helpers/LogEmitter +compileContract test/helpers/VRFLogEmitter +compileContract mocks/MockV3Aggregator compileContract mocks/WERC20Mock -compileContract interfaces/AggregatorV3Interface compileContract openzeppelin-solidity/v4.8.3/contracts/token/ERC20/ERC20 vendor compileContract multicall/ebd8b64/src/Multicall3 vendor -compileContract MockV3Aggregator tests \ No newline at end of file diff --git a/contracts/src/v0.8/Denominations.sol b/contracts/src/v0.8/Denominations.sol deleted file mode 100644 index 6e9aa778ec7..00000000000 --- a/contracts/src/v0.8/Denominations.sol +++ /dev/null @@ -1,28 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity ^0.8.0; - -library Denominations { - address public constant ETH = 0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE; - address public constant BTC = 0xbBbBBBBbbBBBbbbBbbBbbbbBBbBbbbbBbBbbBBbB; - - // Fiat currencies follow https://en.wikipedia.org/wiki/ISO_4217 - address public constant USD = address(840); - address public constant GBP = address(826); - address public constant EUR = address(978); - address public constant JPY = address(392); - address public constant KRW = address(410); - address public constant CNY = address(156); - address public constant AUD = address(36); - address public constant CAD = address(124); - address public constant CHF = address(756); - address public constant ARS = address(32); - address public constant PHP = address(608); - address public constant NZD = address(554); - address public constant SGD = address(702); - address public constant NGN = address(566); - address public constant ZAR = address(710); - address public constant RUB = address(643); - address public constant INR = address(356); - address public constant BRL = address(986); -} diff --git a/contracts/src/v0.8/Flags.sol b/contracts/src/v0.8/Flags.sol deleted file mode 100644 index de14583bcb4..00000000000 --- a/contracts/src/v0.8/Flags.sol +++ /dev/null @@ -1,124 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -import {SimpleReadAccessController} from "./shared/access/SimpleReadAccessController.sol"; -import 
{AccessControllerInterface} from "./shared/interfaces/AccessControllerInterface.sol"; -import {FlagsInterface} from "./interfaces/FlagsInterface.sol"; - -/** - * @title The Flags contract - * @notice Allows flags to signal to any reader on the access control list. - * The owner can set flags, or designate other addresses to set flags. The - * owner must turn the flags off, other setters cannot. An expected pattern is - * to allow addresses to raise flags on themselves, so if you are subscribing to - * FlagOn events you should filter for addresses you care about. - */ -// solhint-disable gas-custom-errors -contract Flags is FlagsInterface, SimpleReadAccessController { - AccessControllerInterface public raisingAccessController; - - mapping(address => bool) private s_flags; - - event FlagRaised(address indexed subject); - event FlagLowered(address indexed subject); - event RaisingAccessControllerUpdated(address indexed previous, address indexed current); - - /** - * @param racAddress address for the raising access controller. - */ - constructor(address racAddress) { - setRaisingAccessController(racAddress); - } - - /** - * @notice read the warning flag status of a contract address. - * @param subject The contract address being checked for a flag. - * @return A true value indicates that a flag was raised and a - * false value indicates that no flag was raised. - */ - function getFlag(address subject) external view override checkAccess returns (bool) { - return s_flags[subject]; - } - - /** - * @notice read the warning flag status of a contract address. - * @param subjects An array of addresses being checked for a flag. - * @return An array of bools where a true value for any flag indicates that - * a flag was raised and a false value indicates that no flag was raised. - */ - function getFlags(address[] calldata subjects) external view override checkAccess returns (bool[] memory) { - bool[] memory responses = new bool[](subjects.length); - for (uint256 i = 0; i < subjects.length; i++) { - responses[i] = s_flags[subjects[i]]; - } - return responses; - } - - /** - * @notice enable the warning flag for an address. - * Access is controlled by raisingAccessController, except for owner - * who always has access. - * @param subject The contract address whose flag is being raised - */ - function raiseFlag(address subject) external override { - require(_allowedToRaiseFlags(), "Not allowed to raise flags"); - - _tryToRaiseFlag(subject); - } - - /** - * @notice enable the warning flags for multiple addresses. - * Access is controlled by raisingAccessController, except for owner - * who always has access. - * @param subjects List of the contract addresses whose flag is being raised - */ - function raiseFlags(address[] calldata subjects) external override { - require(_allowedToRaiseFlags(), "Not allowed to raise flags"); - - for (uint256 i = 0; i < subjects.length; i++) { - _tryToRaiseFlag(subjects[i]); - } - } - - /** - * @notice allows owner to disable the warning flags for multiple addresses. - * @param subjects List of the contract addresses whose flag is being lowered - */ - function lowerFlags(address[] calldata subjects) external override onlyOwner { - for (uint256 i = 0; i < subjects.length; i++) { - address subject = subjects[i]; - - if (s_flags[subject]) { - s_flags[subject] = false; - emit FlagLowered(subject); - } - } - } - - /** - * @notice allows owner to change the access controller for raising flags. - * @param racAddress new address for the raising access controller. 
- */ - function setRaisingAccessController(address racAddress) public override onlyOwner { - address previous = address(raisingAccessController); - - if (previous != racAddress) { - raisingAccessController = AccessControllerInterface(racAddress); - - emit RaisingAccessControllerUpdated(previous, racAddress); - } - } - - // PRIVATE - - function _allowedToRaiseFlags() private view returns (bool) { - return msg.sender == owner() || raisingAccessController.hasAccess(msg.sender, msg.data); - } - - function _tryToRaiseFlag(address subject) private { - if (!s_flags[subject]) { - s_flags[subject] = true; - emit FlagRaised(subject); - } - } -} diff --git a/contracts/src/v0.8/PermissionedForwardProxy.sol b/contracts/src/v0.8/PermissionedForwardProxy.sol deleted file mode 100644 index 544f89065c0..00000000000 --- a/contracts/src/v0.8/PermissionedForwardProxy.sol +++ /dev/null @@ -1,65 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.6; - -import {Address} from "@openzeppelin/contracts/utils/Address.sol"; -import {ConfirmedOwner} from "./shared/access/ConfirmedOwner.sol"; - -/** - * @title PermissionedForwardProxy - * @notice This proxy is used to forward calls from sender to target. It maintains - * a permission list to check which sender is allowed to call which target - */ -contract PermissionedForwardProxy is ConfirmedOwner { - using Address for address; - - error PermissionNotSet(); - - event PermissionSet(address indexed sender, address target); - event PermissionRemoved(address indexed sender); - - mapping(address => address) private s_forwardPermissionList; - - constructor() ConfirmedOwner(msg.sender) {} - - /** - * @notice Verifies if msg.sender has permission to forward to target address and then forwards the handler - * @param target address of the contract to forward the handler to - * @param handler bytes to be passed to target in call data - */ - function forward(address target, bytes calldata handler) external { - if (s_forwardPermissionList[msg.sender] != target) { - revert PermissionNotSet(); - } - target.functionCall(handler); - } - - /** - * @notice Adds permission for sender to forward calls to target via this proxy. 
- * Note that it allows to overwrite an existing permission - * @param sender The address who will use this proxy to forward calls - * @param target The address where sender will be allowed to forward calls - */ - function setPermission(address sender, address target) external onlyOwner { - s_forwardPermissionList[sender] = target; - - emit PermissionSet(sender, target); - } - - /** - * @notice Removes permission for sender to forward calls via this proxy - * @param sender The address who will use this proxy to forward calls - */ - function removePermission(address sender) external onlyOwner { - delete s_forwardPermissionList[sender]; - - emit PermissionRemoved(sender); - } - - /** - * @notice Returns the target address that the sender can use this proxy for - * @param sender The address to fetch the permissioned target for - */ - function getPermission(address sender) external view returns (address) { - return s_forwardPermissionList[sender]; - } -} diff --git a/contracts/src/v0.8/ValidatorProxy.sol b/contracts/src/v0.8/ValidatorProxy.sol deleted file mode 100644 index 58e0e28a899..00000000000 --- a/contracts/src/v0.8/ValidatorProxy.sol +++ /dev/null @@ -1,230 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -import {ConfirmedOwner} from "./shared/access/ConfirmedOwner.sol"; -import {AggregatorValidatorInterface} from "./shared/interfaces/AggregatorValidatorInterface.sol"; -import {TypeAndVersionInterface} from "./interfaces/TypeAndVersionInterface.sol"; - -// solhint-disable gas-custom-errors -contract ValidatorProxy is AggregatorValidatorInterface, TypeAndVersionInterface, ConfirmedOwner { - /// @notice Uses a single storage slot to store the current address - struct AggregatorConfiguration { - address target; - bool hasNewProposal; - } - - struct ValidatorConfiguration { - AggregatorValidatorInterface target; - bool hasNewProposal; - } - - // Configuration for the current aggregator - AggregatorConfiguration private s_currentAggregator; - // Proposed aggregator address - address private s_proposedAggregator; - - // Configuration for the current validator - ValidatorConfiguration private s_currentValidator; - // Proposed validator address - AggregatorValidatorInterface private s_proposedValidator; - - event AggregatorProposed(address indexed aggregator); - event AggregatorUpgraded(address indexed previous, address indexed current); - event ValidatorProposed(AggregatorValidatorInterface indexed validator); - event ValidatorUpgraded(AggregatorValidatorInterface indexed previous, AggregatorValidatorInterface indexed current); - /// @notice The proposed aggregator called validate, but the call was not passed on to any validators - event ProposedAggregatorValidateCall( - address indexed proposed, - uint256 previousRoundId, - int256 previousAnswer, - uint256 currentRoundId, - int256 currentAnswer - ); - - /** - * @notice Construct the ValidatorProxy with an aggregator and a validator - * @param aggregator address - * @param validator address - */ - constructor(address aggregator, AggregatorValidatorInterface validator) ConfirmedOwner(msg.sender) { - s_currentAggregator = AggregatorConfiguration({target: aggregator, hasNewProposal: false}); - s_currentValidator = ValidatorConfiguration({target: validator, hasNewProposal: false}); - } - - /** - * @notice Validate a transmission - * @dev Must be called by either the `s_currentAggregator.target`, or the `s_proposedAggregator`. 
- * If called by the `s_currentAggregator.target` this function passes the call on to the `s_currentValidator.target` - * and the `s_proposedValidator`, if it is set. - * If called by the `s_proposedAggregator` this function emits a `ProposedAggregatorValidateCall` to signal that - * the call was received. - * @dev To guard against external `validate` calls reverting, we use raw calls here. - * We favour `call` over try-catch to ensure that failures are avoided even if the validator address is incorrectly - * set as a non-contract address. - * @dev If the `aggregator` and `validator` are the same contract or collude, this could exhibit reentrancy behavior. - * However, since that contract would have to be explicitly written for reentrancy and that the `owner` would have - * to configure this contract to use that malicious contract, we refrain from using mutex or check here. - * @dev This does not perform any checks on any roundId, so it is possible that a validator receive different reports - * for the same roundId at different points in time. Validator implementations should be aware of this. - * @param previousRoundId uint256 - * @param previousAnswer int256 - * @param currentRoundId uint256 - * @param currentAnswer int256 - * @return bool - */ - function validate( - uint256 previousRoundId, - int256 previousAnswer, - uint256 currentRoundId, - int256 currentAnswer - ) external override returns (bool) { - address currentAggregator = s_currentAggregator.target; - if (msg.sender != currentAggregator) { - address proposedAggregator = s_proposedAggregator; - require(msg.sender == proposedAggregator, "Not a configured aggregator"); - // If the aggregator is still in proposed state, emit an event and don't push to any validator. - // This is to confirm that `validate` is being called prior to upgrade. - emit ProposedAggregatorValidateCall( - proposedAggregator, - previousRoundId, - previousAnswer, - currentRoundId, - currentAnswer - ); - return true; - } - - // Send the validate call to the current validator - ValidatorConfiguration memory currentValidator = s_currentValidator; - address currentValidatorAddress = address(currentValidator.target); - require(currentValidatorAddress != address(0), "No validator set"); - // solhint-disable-next-line avoid-low-level-calls - currentValidatorAddress.call( - abi.encodeWithSelector( - AggregatorValidatorInterface.validate.selector, - previousRoundId, - previousAnswer, - currentRoundId, - currentAnswer - ) - ); - // If there is a new proposed validator, send the validate call to that validator also - if (currentValidator.hasNewProposal) { - // solhint-disable-next-line avoid-low-level-calls - address(s_proposedValidator).call( - abi.encodeWithSelector( - AggregatorValidatorInterface.validate.selector, - previousRoundId, - previousAnswer, - currentRoundId, - currentAnswer - ) - ); - } - return true; - } - - /** AGGREGATOR CONFIGURATION FUNCTIONS **/ - - /** - * @notice Propose an aggregator - * @dev A zero address can be used to unset the proposed aggregator. Only owner can call. 
- * @param proposed address - */ - function proposeNewAggregator(address proposed) external onlyOwner { - require(s_proposedAggregator != proposed && s_currentAggregator.target != proposed, "Invalid proposal"); - s_proposedAggregator = proposed; - // If proposed is zero address, hasNewProposal = false - s_currentAggregator.hasNewProposal = (proposed != address(0)); - emit AggregatorProposed(proposed); - } - - /** - * @notice Upgrade the aggregator by setting the current aggregator as the proposed aggregator. - * @dev Must have a proposed aggregator. Only owner can call. - */ - function upgradeAggregator() external onlyOwner { - // Get configuration in memory - AggregatorConfiguration memory current = s_currentAggregator; - address previous = current.target; - address proposed = s_proposedAggregator; - - // Perform the upgrade - require(current.hasNewProposal, "No proposal"); - s_currentAggregator = AggregatorConfiguration({target: proposed, hasNewProposal: false}); - delete s_proposedAggregator; - - emit AggregatorUpgraded(previous, proposed); - } - - /** - * @notice Get aggregator details - * @return current address - * @return hasProposal bool - * @return proposed address - */ - function getAggregators() external view returns (address current, bool hasProposal, address proposed) { - current = s_currentAggregator.target; - hasProposal = s_currentAggregator.hasNewProposal; - proposed = s_proposedAggregator; - return (current, hasProposal, proposed); - } - - /** VALIDATOR CONFIGURATION FUNCTIONS **/ - - /** - * @notice Propose an validator - * @dev A zero address can be used to unset the proposed validator. Only owner can call. - * @param proposed address - */ - function proposeNewValidator(AggregatorValidatorInterface proposed) external onlyOwner { - require(s_proposedValidator != proposed && s_currentValidator.target != proposed, "Invalid proposal"); - s_proposedValidator = proposed; - // If proposed is zero address, hasNewProposal = false - s_currentValidator.hasNewProposal = (address(proposed) != address(0)); - emit ValidatorProposed(proposed); - } - - /** - * @notice Upgrade the validator by setting the current validator as the proposed validator. - * @dev Must have a proposed validator. Only owner can call. 
- */ - function upgradeValidator() external onlyOwner { - // Get configuration in memory - ValidatorConfiguration memory current = s_currentValidator; - AggregatorValidatorInterface previous = current.target; - AggregatorValidatorInterface proposed = s_proposedValidator; - - // Perform the upgrade - require(current.hasNewProposal, "No proposal"); - s_currentValidator = ValidatorConfiguration({target: proposed, hasNewProposal: false}); - delete s_proposedValidator; - - emit ValidatorUpgraded(previous, proposed); - } - - /** - * @notice Get validator details - * @return current address - * @return hasProposal bool - * @return proposed address - */ - function getValidators() - external - view - returns (AggregatorValidatorInterface current, bool hasProposal, AggregatorValidatorInterface proposed) - { - current = s_currentValidator.target; - hasProposal = s_currentValidator.hasNewProposal; - proposed = s_proposedValidator; - return (current, hasProposal, proposed); - } - - /** - * @notice The type and version of this contract - * @return Type and version string - */ - function typeAndVersion() external pure virtual override returns (string memory) { - return "ValidatorProxy 1.0.0"; - } -} diff --git a/contracts/src/v0.8/automation/HeartbeatRequester.sol b/contracts/src/v0.8/automation/HeartbeatRequester.sol index 8ef7fa44422..077bb93d18f 100644 --- a/contracts/src/v0.8/automation/HeartbeatRequester.sol +++ b/contracts/src/v0.8/automation/HeartbeatRequester.sol @@ -2,7 +2,7 @@ // solhint-disable-next-line one-contract-per-file pragma solidity 0.8.6; -import {TypeAndVersionInterface} from "./../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "./../shared/interfaces/ITypeAndVersion.sol"; import {ConfirmedOwner} from "../shared/access/ConfirmedOwner.sol"; // defines some interfaces for type safety and reduces encoding/decoding @@ -20,7 +20,7 @@ interface IOffchainAggregator { * by eligible caller, it will call a proxy for an aggregator address and request a new round. The aggregator * is gated by permissions and this requester address needs to be whitelisted. 
*/ -contract HeartbeatRequester is TypeAndVersionInterface, ConfirmedOwner { +contract HeartbeatRequester is ITypeAndVersion, ConfirmedOwner { event HeartbeatPermitted(address indexed permittedCaller, address newProxy, address oldProxy); event HeartbeatRemoved(address indexed permittedCaller, address removedProxy); diff --git a/contracts/src/v0.8/automation/UpkeepTranscoder.sol b/contracts/src/v0.8/automation/UpkeepTranscoder.sol index 03f40d890b8..5e60270d355 100644 --- a/contracts/src/v0.8/automation/UpkeepTranscoder.sol +++ b/contracts/src/v0.8/automation/UpkeepTranscoder.sol @@ -3,14 +3,14 @@ pragma solidity ^0.8.0; import {UpkeepTranscoderInterface} from "./interfaces/UpkeepTranscoderInterface.sol"; -import {TypeAndVersionInterface} from "../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../shared/interfaces/ITypeAndVersion.sol"; import {UpkeepFormat} from "./UpkeepFormat.sol"; /** * @notice Transcoder for converting upkeep data from one keeper * registry version to another */ -contract UpkeepTranscoder is UpkeepTranscoderInterface, TypeAndVersionInterface { +contract UpkeepTranscoder is UpkeepTranscoderInterface, ITypeAndVersion { error InvalidTranscoding(); /** diff --git a/contracts/src/v0.8/automation/dev/MercuryRegistry.sol b/contracts/src/v0.8/automation/dev/MercuryRegistry.sol index 247301a7438..9035f0af927 100644 --- a/contracts/src/v0.8/automation/dev/MercuryRegistry.sol +++ b/contracts/src/v0.8/automation/dev/MercuryRegistry.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.19; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; import {AutomationCompatibleInterface} from "../interfaces/AutomationCompatibleInterface.sol"; import {StreamsLookupCompatibleInterface} from "../interfaces/StreamsLookupCompatibleInterface.sol"; -import {ChainSpecificUtil} from "../../ChainSpecificUtil.sol"; +import {ChainSpecificUtil} from "../../shared/util/ChainSpecificUtil.sol"; /*--------------------------------------------------------------------------------------------------------------------+ | Mercury + Automation | diff --git a/contracts/src/v0.8/tests/MockArbGasInfo.sol b/contracts/src/v0.8/automation/mocks/MockArbGasInfo.sol similarity index 100% rename from contracts/src/v0.8/tests/MockArbGasInfo.sol rename to contracts/src/v0.8/automation/mocks/MockArbGasInfo.sol diff --git a/contracts/src/v0.8/mocks/MockArbSys.sol b/contracts/src/v0.8/automation/mocks/MockArbSys.sol similarity index 100% rename from contracts/src/v0.8/mocks/MockArbSys.sol rename to contracts/src/v0.8/automation/mocks/MockArbSys.sol diff --git a/contracts/src/v0.8/tests/MockGasBoundCaller.sol b/contracts/src/v0.8/automation/mocks/MockGasBoundCaller.sol similarity index 100% rename from contracts/src/v0.8/tests/MockGasBoundCaller.sol rename to contracts/src/v0.8/automation/mocks/MockGasBoundCaller.sol diff --git a/contracts/src/v0.8/tests/MockZKSyncSystemContext.sol b/contracts/src/v0.8/automation/mocks/MockZKSyncSystemContext.sol similarity index 100% rename from contracts/src/v0.8/tests/MockZKSyncSystemContext.sol rename to contracts/src/v0.8/automation/mocks/MockZKSyncSystemContext.sol diff --git a/contracts/src/v0.8/automation/test/v2_3/BaseTest.t.sol b/contracts/src/v0.8/automation/test/v2_3/BaseTest.t.sol index e0d15daab6c..f1086e7bfa4 100644 --- a/contracts/src/v0.8/automation/test/v2_3/BaseTest.t.sol +++ b/contracts/src/v0.8/automation/test/v2_3/BaseTest.t.sol @@ -6,7 +6,7 @@ import "forge-std/Test.sol"; import {LinkToken} from "../../../shared/token/ERC677/LinkToken.sol"; 
import {ERC20Mock} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/mocks/ERC20Mock.sol"; import {ERC20Mock6Decimals} from "../../mocks/ERC20Mock6Decimals.sol"; -import {MockV3Aggregator} from "../../../tests/MockV3Aggregator.sol"; +import {MockV3Aggregator} from "../../../shared/mocks/MockV3Aggregator.sol"; import {AutomationForwarderLogic} from "../../AutomationForwarderLogic.sol"; import {UpkeepTranscoder5_0 as Transcoder} from "../../v2_3/UpkeepTranscoder5_0.sol"; import {AutomationRegistry2_3} from "../../v2_3/AutomationRegistry2_3.sol"; diff --git a/contracts/src/v0.8/automation/test/v2_3_zksync/BaseTest.t.sol b/contracts/src/v0.8/automation/test/v2_3_zksync/BaseTest.t.sol index cde05ab3a22..dde8f5b3867 100644 --- a/contracts/src/v0.8/automation/test/v2_3_zksync/BaseTest.t.sol +++ b/contracts/src/v0.8/automation/test/v2_3_zksync/BaseTest.t.sol @@ -6,7 +6,7 @@ import "forge-std/Test.sol"; import {LinkToken} from "../../../shared/token/ERC677/LinkToken.sol"; import {ERC20Mock} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/mocks/ERC20Mock.sol"; import {ERC20Mock6Decimals} from "../../mocks/ERC20Mock6Decimals.sol"; -import {MockV3Aggregator} from "../../../tests/MockV3Aggregator.sol"; +import {MockV3Aggregator} from "../../../shared/mocks/MockV3Aggregator.sol"; import {AutomationForwarderLogic} from "../../AutomationForwarderLogic.sol"; import {UpkeepTranscoder5_0 as Transcoder} from "../../v2_3/UpkeepTranscoder5_0.sol"; import {ZKSyncAutomationRegistry2_3} from "../../v2_3_zksync/ZKSyncAutomationRegistry2_3.sol"; @@ -21,8 +21,8 @@ import {IERC20Metadata as IERC20} from "../../../vendor/openzeppelin-solidity/v4 import {MockUpkeep} from "../../mocks/MockUpkeep.sol"; import {IWrappedNative} from "../../interfaces/v2_3/IWrappedNative.sol"; import {WETH9} from "../WETH9.sol"; -import {MockGasBoundCaller} from "../../../tests/MockGasBoundCaller.sol"; -import {MockZKSyncSystemContext} from "../../../tests/MockZKSyncSystemContext.sol"; +import {MockGasBoundCaller} from "../../mocks/MockGasBoundCaller.sol"; +import {MockZKSyncSystemContext} from "../../mocks/MockZKSyncSystemContext.sol"; /** * @title BaseTest provides basic test setup procedures and dependencies for use by other diff --git a/contracts/src/v0.8/tests/AutomationConsumerBenchmark.sol b/contracts/src/v0.8/automation/testhelpers/AutomationConsumerBenchmark.sol similarity index 100% rename from contracts/src/v0.8/tests/AutomationConsumerBenchmark.sol rename to contracts/src/v0.8/automation/testhelpers/AutomationConsumerBenchmark.sol diff --git a/contracts/src/v0.8/tests/CronReceiver.sol b/contracts/src/v0.8/automation/testhelpers/CronReceiver.sol similarity index 100% rename from contracts/src/v0.8/tests/CronReceiver.sol rename to contracts/src/v0.8/automation/testhelpers/CronReceiver.sol diff --git a/contracts/src/v0.8/tests/ERC20BalanceMonitorExposed.sol b/contracts/src/v0.8/automation/testhelpers/ERC20BalanceMonitorExposed.sol similarity index 89% rename from contracts/src/v0.8/tests/ERC20BalanceMonitorExposed.sol rename to contracts/src/v0.8/automation/testhelpers/ERC20BalanceMonitorExposed.sol index a29ba36eeb4..748cf1cb727 100644 --- a/contracts/src/v0.8/tests/ERC20BalanceMonitorExposed.sol +++ b/contracts/src/v0.8/automation/testhelpers/ERC20BalanceMonitorExposed.sol @@ -2,7 +2,7 @@ pragma solidity 0.8.6; -import "../automation/upkeeps/ERC20BalanceMonitor.sol"; +import "../upkeeps/ERC20BalanceMonitor.sol"; contract ERC20BalanceMonitorExposed is ERC20BalanceMonitor { constructor( diff --git 
a/contracts/src/v0.8/tests/EthBalanceMonitorExposed.sol b/contracts/src/v0.8/automation/testhelpers/EthBalanceMonitorExposed.sol similarity index 88% rename from contracts/src/v0.8/tests/EthBalanceMonitorExposed.sol rename to contracts/src/v0.8/automation/testhelpers/EthBalanceMonitorExposed.sol index 74cc682df23..f27c9621c39 100644 --- a/contracts/src/v0.8/tests/EthBalanceMonitorExposed.sol +++ b/contracts/src/v0.8/automation/testhelpers/EthBalanceMonitorExposed.sol @@ -2,7 +2,7 @@ pragma solidity 0.8.6; -import "../automation/upkeeps/EthBalanceMonitor.sol"; +import "../upkeeps/EthBalanceMonitor.sol"; contract EthBalanceMonitorExposed is EthBalanceMonitor { constructor( diff --git a/contracts/src/v0.8/tests/KeeperCompatibleTestHelper.sol b/contracts/src/v0.8/automation/testhelpers/KeeperCompatibleTestHelper.sol similarity index 88% rename from contracts/src/v0.8/tests/KeeperCompatibleTestHelper.sol rename to contracts/src/v0.8/automation/testhelpers/KeeperCompatibleTestHelper.sol index 2e931c4fb4d..3c71dc2f848 100644 --- a/contracts/src/v0.8/tests/KeeperCompatibleTestHelper.sol +++ b/contracts/src/v0.8/automation/testhelpers/KeeperCompatibleTestHelper.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; -import "../automation/KeeperCompatible.sol"; +import "../KeeperCompatible.sol"; contract KeeperCompatibleTestHelper is KeeperCompatible { function checkUpkeep(bytes calldata) external override returns (bool, bytes memory) {} diff --git a/contracts/src/v0.8/tests/MockOVMGasPriceOracle.sol b/contracts/src/v0.8/automation/testhelpers/MockOVMGasPriceOracle.sol similarity index 100% rename from contracts/src/v0.8/tests/MockOVMGasPriceOracle.sol rename to contracts/src/v0.8/automation/testhelpers/MockOVMGasPriceOracle.sol diff --git a/contracts/src/v0.8/tests/ReceiveEmitter.sol b/contracts/src/v0.8/automation/testhelpers/ReceiveEmitter.sol similarity index 100% rename from contracts/src/v0.8/tests/ReceiveEmitter.sol rename to contracts/src/v0.8/automation/testhelpers/ReceiveEmitter.sol diff --git a/contracts/src/v0.8/tests/ReceiveFallbackEmitter.sol b/contracts/src/v0.8/automation/testhelpers/ReceiveFallbackEmitter.sol similarity index 100% rename from contracts/src/v0.8/tests/ReceiveFallbackEmitter.sol rename to contracts/src/v0.8/automation/testhelpers/ReceiveFallbackEmitter.sol diff --git a/contracts/src/v0.8/tests/ReceiveReverter.sol b/contracts/src/v0.8/automation/testhelpers/ReceiveReverter.sol similarity index 100% rename from contracts/src/v0.8/tests/ReceiveReverter.sol rename to contracts/src/v0.8/automation/testhelpers/ReceiveReverter.sol diff --git a/contracts/src/v0.8/tests/StreamsLookupUpkeep.sol b/contracts/src/v0.8/automation/testhelpers/StreamsLookupUpkeep.sol similarity index 95% rename from contracts/src/v0.8/tests/StreamsLookupUpkeep.sol rename to contracts/src/v0.8/automation/testhelpers/StreamsLookupUpkeep.sol index dec93d5b1f7..aaf35b5c595 100644 --- a/contracts/src/v0.8/tests/StreamsLookupUpkeep.sol +++ b/contracts/src/v0.8/automation/testhelpers/StreamsLookupUpkeep.sol @@ -1,8 +1,8 @@ pragma solidity 0.8.16; -import "../automation/interfaces/AutomationCompatibleInterface.sol"; -import "../automation/interfaces/StreamsLookupCompatibleInterface.sol"; -import {ArbSys} from "../vendor/@arbitrum/nitro-contracts/src/precompiles/ArbSys.sol"; +import "../interfaces/AutomationCompatibleInterface.sol"; +import "../interfaces/StreamsLookupCompatibleInterface.sol"; +import {ArbSys} from "../../vendor/@arbitrum/nitro-contracts/src/precompiles/ArbSys.sol"; 
interface IVerifierProxy { /** diff --git a/contracts/src/v0.8/tests/VerifiableLoadBase.sol b/contracts/src/v0.8/automation/testhelpers/VerifiableLoadBase.sol similarity index 98% rename from contracts/src/v0.8/tests/VerifiableLoadBase.sol rename to contracts/src/v0.8/automation/testhelpers/VerifiableLoadBase.sol index 86ebf8b8c7c..1aa181dd1d3 100644 --- a/contracts/src/v0.8/tests/VerifiableLoadBase.sol +++ b/contracts/src/v0.8/automation/testhelpers/VerifiableLoadBase.sol @@ -1,11 +1,11 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.16; -import "../vendor/openzeppelin-solidity/v4.7.3/contracts/utils/structs/EnumerableSet.sol"; -import {IKeeperRegistryMaster, IAutomationV21PlusCommon} from "../automation/interfaces/v2_1/IKeeperRegistryMaster.sol"; -import {ArbSys} from "../vendor/@arbitrum/nitro-contracts/src/precompiles/ArbSys.sol"; -import "../automation/v2_1/AutomationRegistrar2_1.sol"; -import {LogTriggerConfig} from "../automation/v2_1/AutomationUtils2_1.sol"; +import "../../vendor/openzeppelin-solidity/v4.7.3/contracts/utils/structs/EnumerableSet.sol"; +import {IKeeperRegistryMaster, IAutomationV21PlusCommon} from "../interfaces/v2_1/IKeeperRegistryMaster.sol"; +import {ArbSys} from "../../vendor/@arbitrum/nitro-contracts/src/precompiles/ArbSys.sol"; +import "../v2_1/AutomationRegistrar2_1.sol"; +import {LogTriggerConfig} from "../v2_1/AutomationUtils2_1.sol"; abstract contract VerifiableLoadBase is ConfirmedOwner { error IndexOutOfRange(); diff --git a/contracts/src/v0.8/tests/VerifiableLoadLogTriggerUpkeep.sol b/contracts/src/v0.8/automation/testhelpers/VerifiableLoadLogTriggerUpkeep.sol similarity index 97% rename from contracts/src/v0.8/tests/VerifiableLoadLogTriggerUpkeep.sol rename to contracts/src/v0.8/automation/testhelpers/VerifiableLoadLogTriggerUpkeep.sol index 39b95bb0ae5..400ddd0c966 100644 --- a/contracts/src/v0.8/tests/VerifiableLoadLogTriggerUpkeep.sol +++ b/contracts/src/v0.8/automation/testhelpers/VerifiableLoadLogTriggerUpkeep.sol @@ -2,8 +2,8 @@ pragma solidity 0.8.16; import "./VerifiableLoadBase.sol"; -import "../automation/interfaces/ILogAutomation.sol"; -import "../automation/interfaces/StreamsLookupCompatibleInterface.sol"; +import "../interfaces/ILogAutomation.sol"; +import "../interfaces/StreamsLookupCompatibleInterface.sol"; contract VerifiableLoadLogTriggerUpkeep is VerifiableLoadBase, StreamsLookupCompatibleInterface, ILogAutomation { bool public useMercury; diff --git a/contracts/src/v0.8/tests/VerifiableLoadStreamsLookupUpkeep.sol b/contracts/src/v0.8/automation/testhelpers/VerifiableLoadStreamsLookupUpkeep.sol similarity index 97% rename from contracts/src/v0.8/tests/VerifiableLoadStreamsLookupUpkeep.sol rename to contracts/src/v0.8/automation/testhelpers/VerifiableLoadStreamsLookupUpkeep.sol index c74aec1a790..97be9ebc81a 100644 --- a/contracts/src/v0.8/tests/VerifiableLoadStreamsLookupUpkeep.sol +++ b/contracts/src/v0.8/automation/testhelpers/VerifiableLoadStreamsLookupUpkeep.sol @@ -2,7 +2,7 @@ pragma solidity 0.8.16; import "./VerifiableLoadBase.sol"; -import "../automation/interfaces/StreamsLookupCompatibleInterface.sol"; +import "../interfaces/StreamsLookupCompatibleInterface.sol"; contract VerifiableLoadStreamsLookupUpkeep is VerifiableLoadBase, StreamsLookupCompatibleInterface { constructor(AutomationRegistrar2_1 _registrar, bool _useArb) VerifiableLoadBase(_registrar, _useArb) {} diff --git a/contracts/src/v0.8/tests/VerifiableLoadUpkeep.sol b/contracts/src/v0.8/automation/testhelpers/VerifiableLoadUpkeep.sol similarity index 100% 
rename from contracts/src/v0.8/tests/VerifiableLoadUpkeep.sol rename to contracts/src/v0.8/automation/testhelpers/VerifiableLoadUpkeep.sol diff --git a/contracts/src/v0.8/automation/v1_2/KeeperRegistrar1_2.sol b/contracts/src/v0.8/automation/v1_2/KeeperRegistrar1_2.sol index f455d56f17a..d2b6e560487 100644 --- a/contracts/src/v0.8/automation/v1_2/KeeperRegistrar1_2.sol +++ b/contracts/src/v0.8/automation/v1_2/KeeperRegistrar1_2.sol @@ -2,7 +2,7 @@ pragma solidity 0.8.6; import "../interfaces/v1_2/KeeperRegistryInterface1_2.sol"; -import "../../interfaces/TypeAndVersionInterface.sol"; +import "../../shared/interfaces/ITypeAndVersion.sol"; import "../../shared/interfaces/LinkTokenInterface.sol"; import "../../shared/access/ConfirmedOwner.sol"; import "../../shared/interfaces/IERC677Receiver.sol"; @@ -17,7 +17,7 @@ import "../../shared/interfaces/IERC677Receiver.sol"; * The idea is to have same interface(functions,events) for UI or anyone using this contract irrespective of auto approve being enabled or not. * they can just listen to `RegistrationRequested` & `RegistrationApproved` events and know the status on registrations. */ -contract KeeperRegistrar is TypeAndVersionInterface, ConfirmedOwner, IERC677Receiver { +contract KeeperRegistrar is ITypeAndVersion, ConfirmedOwner, IERC677Receiver { /** * DISABLED: No auto approvals, all new upkeeps should be approved manually. * ENABLED_SENDER_ALLOWLIST: Auto approvals for allowed senders subject to max allowed. Manual for rest. diff --git a/contracts/src/v0.8/automation/v1_2/KeeperRegistry1_2.sol b/contracts/src/v0.8/automation/v1_2/KeeperRegistry1_2.sol index 2fa1ee6188b..5e1c8dacd48 100644 --- a/contracts/src/v0.8/automation/v1_2/KeeperRegistry1_2.sol +++ b/contracts/src/v0.8/automation/v1_2/KeeperRegistry1_2.sol @@ -6,7 +6,7 @@ import "@openzeppelin/contracts/utils/Address.sol"; import "@openzeppelin/contracts/security/Pausable.sol"; import "@openzeppelin/contracts/security/ReentrancyGuard.sol"; import "../KeeperBase.sol"; -import "../../interfaces/TypeAndVersionInterface.sol"; +import "../../shared/interfaces/ITypeAndVersion.sol"; import "../../shared/interfaces/AggregatorV3Interface.sol"; import "../interfaces/KeeperCompatibleInterface.sol"; import "../interfaces/v1_2/KeeperRegistryInterface1_2.sol"; @@ -31,7 +31,7 @@ struct Upkeep { * contracts. Clients must support the Upkeep interface. 
*/ contract KeeperRegistry1_2 is - TypeAndVersionInterface, + ITypeAndVersion, ConfirmedOwner, KeeperBase, ReentrancyGuard, diff --git a/contracts/src/v0.8/automation/v1_3/KeeperRegistry1_3.sol b/contracts/src/v0.8/automation/v1_3/KeeperRegistry1_3.sol index dbef8d77d19..2d56443822b 100644 --- a/contracts/src/v0.8/automation/v1_3/KeeperRegistry1_3.sol +++ b/contracts/src/v0.8/automation/v1_3/KeeperRegistry1_3.sol @@ -8,7 +8,7 @@ import "./KeeperRegistryBase1_3.sol"; import "./KeeperRegistryLogic1_3.sol"; import {AutomationRegistryExecutableInterface, State} from "../interfaces/v1_3/AutomationRegistryInterface1_3.sol"; import "../interfaces/MigratableKeeperRegistryInterface.sol"; -import "../../interfaces/TypeAndVersionInterface.sol"; +import "../../shared/interfaces/ITypeAndVersion.sol"; import "../../shared/interfaces/IERC677Receiver.sol"; /** @@ -18,7 +18,7 @@ import "../../shared/interfaces/IERC677Receiver.sol"; contract KeeperRegistry1_3 is KeeperRegistryBase1_3, Proxy, - TypeAndVersionInterface, + ITypeAndVersion, AutomationRegistryExecutableInterface, MigratableKeeperRegistryInterface, IERC677Receiver diff --git a/contracts/src/v0.8/automation/v2_0/KeeperRegistrar2_0.sol b/contracts/src/v0.8/automation/v2_0/KeeperRegistrar2_0.sol index c1b7e45b859..78cc06a8b20 100644 --- a/contracts/src/v0.8/automation/v2_0/KeeperRegistrar2_0.sol +++ b/contracts/src/v0.8/automation/v2_0/KeeperRegistrar2_0.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.6; import "../../shared/interfaces/LinkTokenInterface.sol"; import "../interfaces/v2_0/AutomationRegistryInterface2_0.sol"; -import "../../interfaces/TypeAndVersionInterface.sol"; +import "../../shared/interfaces/ITypeAndVersion.sol"; import "../../shared/access/ConfirmedOwner.sol"; import "../../shared/interfaces/IERC677Receiver.sol"; @@ -17,7 +17,7 @@ import "../../shared/interfaces/IERC677Receiver.sol"; * The idea is to have same interface(functions,events) for UI or anyone using this contract irrespective of auto approve being enabled or not. * they can just listen to `RegistrationRequested` & `RegistrationApproved` events and know the status on registrations. */ -contract KeeperRegistrar2_0 is TypeAndVersionInterface, ConfirmedOwner, IERC677Receiver { +contract KeeperRegistrar2_0 is ITypeAndVersion, ConfirmedOwner, IERC677Receiver { /** * DISABLED: No auto approvals, all new upkeeps should be approved manually. * ENABLED_SENDER_ALLOWLIST: Auto approvals for allowed senders subject to max allowed. Manual for rest. 
diff --git a/contracts/src/v0.8/automation/v2_0/UpkeepTranscoder3_0.sol b/contracts/src/v0.8/automation/v2_0/UpkeepTranscoder3_0.sol index 0a56f209cc8..df8368de691 100644 --- a/contracts/src/v0.8/automation/v2_0/UpkeepTranscoder3_0.sol +++ b/contracts/src/v0.8/automation/v2_0/UpkeepTranscoder3_0.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.6; import "../../automation/interfaces/UpkeepTranscoderInterface.sol"; -import "../../interfaces/TypeAndVersionInterface.sol"; +import "../../shared/interfaces/ITypeAndVersion.sol"; import {Upkeep as UpkeepV1} from "../../automation/v1_2/KeeperRegistry1_2.sol"; import {Upkeep as UpkeepV2} from "../../automation/v1_3/KeeperRegistryBase1_3.sol"; import {Upkeep as UpkeepV3} from "../../automation/v2_0/KeeperRegistryBase2_0.sol"; @@ -13,7 +13,7 @@ import "../../automation/UpkeepFormat.sol"; * @notice UpkeepTranscoder 3_0 allows converting upkeep data from previous keeper registry versions 1.2 and 1.3 to * registry 2.0 */ -contract UpkeepTranscoder3_0 is UpkeepTranscoderInterface, TypeAndVersionInterface { +contract UpkeepTranscoder3_0 is UpkeepTranscoderInterface, ITypeAndVersion { error InvalidTranscoding(); /** diff --git a/contracts/src/v0.8/automation/v2_1/AutomationRegistrar2_1.sol b/contracts/src/v0.8/automation/v2_1/AutomationRegistrar2_1.sol index 407dda2414e..503f16bbe4c 100644 --- a/contracts/src/v0.8/automation/v2_1/AutomationRegistrar2_1.sol +++ b/contracts/src/v0.8/automation/v2_1/AutomationRegistrar2_1.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.16; import {LinkTokenInterface} from "../../shared/interfaces/LinkTokenInterface.sol"; import {IKeeperRegistryMaster} from "../interfaces/v2_1/IKeeperRegistryMaster.sol"; -import {TypeAndVersionInterface} from "../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; import {IERC677Receiver} from "../../shared/interfaces/IERC677Receiver.sol"; @@ -17,7 +17,7 @@ import {IERC677Receiver} from "../../shared/interfaces/IERC677Receiver.sol"; * The idea is to have same interface(functions,events) for UI or anyone using this contract irrespective of auto approve being enabled or not. * they can just listen to `RegistrationRequested` & `RegistrationApproved` events and know the status on registrations. */ -contract AutomationRegistrar2_1 is TypeAndVersionInterface, ConfirmedOwner, IERC677Receiver { +contract AutomationRegistrar2_1 is ITypeAndVersion, ConfirmedOwner, IERC677Receiver { /** * DISABLED: No auto approvals, all new upkeeps should be approved manually. * ENABLED_SENDER_ALLOWLIST: Auto approvals for allowed senders subject to max allowed. Manual for rest. 
diff --git a/contracts/src/v0.8/automation/v2_1/UpkeepTranscoder4_0.sol b/contracts/src/v0.8/automation/v2_1/UpkeepTranscoder4_0.sol index 53b681d4cc1..41f50de0932 100644 --- a/contracts/src/v0.8/automation/v2_1/UpkeepTranscoder4_0.sol +++ b/contracts/src/v0.8/automation/v2_1/UpkeepTranscoder4_0.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.16; import {UpkeepTranscoderInterfaceV2} from "../interfaces/UpkeepTranscoderInterfaceV2.sol"; -import {TypeAndVersionInterface} from "../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; import {KeeperRegistryBase2_1 as R21} from "./KeeperRegistryBase2_1.sol"; import {IAutomationForwarder} from "../interfaces/IAutomationForwarder.sol"; @@ -52,7 +52,7 @@ struct UpkeepV20 { * @notice UpkeepTranscoder allows converting upkeep data from previous keeper registry versions 1.2, 1.3, and * 2.0 to registry 2.1 */ -contract UpkeepTranscoder4_0 is UpkeepTranscoderInterfaceV2, TypeAndVersionInterface { +contract UpkeepTranscoder4_0 is UpkeepTranscoderInterfaceV2, ITypeAndVersion { error InvalidTranscoding(); /** diff --git a/contracts/src/v0.8/automation/v2_3/AutomationRegistrar2_3.sol b/contracts/src/v0.8/automation/v2_3/AutomationRegistrar2_3.sol index 2effb8d4d2f..251611cfb04 100644 --- a/contracts/src/v0.8/automation/v2_3/AutomationRegistrar2_3.sol +++ b/contracts/src/v0.8/automation/v2_3/AutomationRegistrar2_3.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.19; import {LinkTokenInterface} from "../../shared/interfaces/LinkTokenInterface.sol"; import {IAutomationRegistryMaster2_3} from "../interfaces/v2_3/IAutomationRegistryMaster2_3.sol"; -import {TypeAndVersionInterface} from "../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; import {IERC677Receiver} from "../../shared/interfaces/IERC677Receiver.sol"; import {IERC20Metadata as IERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/extensions/IERC20Metadata.sol"; @@ -21,7 +21,7 @@ import {SafeERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/tok * The idea is to have same interface(functions,events) for UI or anyone using this contract irrespective of auto approve being enabled or not. * they can just listen to `RegistrationRequested` & `RegistrationApproved` events and know the status on registrations. 
*/ -contract AutomationRegistrar2_3 is TypeAndVersionInterface, ConfirmedOwner, IERC677Receiver { +contract AutomationRegistrar2_3 is ITypeAndVersion, ConfirmedOwner, IERC677Receiver { using SafeERC20 for IERC20; /** diff --git a/contracts/src/v0.8/automation/v2_3/UpkeepTranscoder5_0.sol b/contracts/src/v0.8/automation/v2_3/UpkeepTranscoder5_0.sol index 32530c71257..e0312588ed9 100644 --- a/contracts/src/v0.8/automation/v2_3/UpkeepTranscoder5_0.sol +++ b/contracts/src/v0.8/automation/v2_3/UpkeepTranscoder5_0.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.19; import {UpkeepTranscoderInterfaceV2} from "../interfaces/UpkeepTranscoderInterfaceV2.sol"; -import {TypeAndVersionInterface} from "../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; enum RegistryVersion { V12, @@ -17,7 +17,7 @@ enum RegistryVersion { * @notice UpkeepTranscoder is a contract that allows converting upkeep data from previous registry versions to newer versions * @dev it currently only supports 2.3 -> 2.3 migrations */ -contract UpkeepTranscoder5_0 is UpkeepTranscoderInterfaceV2, TypeAndVersionInterface { +contract UpkeepTranscoder5_0 is UpkeepTranscoderInterfaceV2, ITypeAndVersion { error InvalidTranscoding(); string public constant override typeAndVersion = "UpkeepTranscoder 5.0.0"; diff --git a/contracts/src/v0.8/ccip/test/feeQuoter/FeeQuoter.getTokenPrice.t.sol b/contracts/src/v0.8/ccip/test/feeQuoter/FeeQuoter.getTokenPrice.t.sol index a06e4cbebf8..a0f0a1076d8 100644 --- a/contracts/src/v0.8/ccip/test/feeQuoter/FeeQuoter.getTokenPrice.t.sol +++ b/contracts/src/v0.8/ccip/test/feeQuoter/FeeQuoter.getTokenPrice.t.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: BUSL-1.1 pragma solidity ^0.8.24; -import {MockV3Aggregator} from "../../../tests/MockV3Aggregator.sol"; +import {MockV3Aggregator} from "../../../shared/mocks/MockV3Aggregator.sol"; import {FeeQuoter} from "../../FeeQuoter.sol"; import {Internal} from "../../libraries/Internal.sol"; import {FeeQuoterSetup} from "./FeeQuoterSetup.t.sol"; diff --git a/contracts/src/v0.8/ccip/test/feeQuoter/FeeQuoter.getValidatedTokenPrice.t.sol b/contracts/src/v0.8/ccip/test/feeQuoter/FeeQuoter.getValidatedTokenPrice.t.sol index d43cc5a6799..b5603a61306 100644 --- a/contracts/src/v0.8/ccip/test/feeQuoter/FeeQuoter.getValidatedTokenPrice.t.sol +++ b/contracts/src/v0.8/ccip/test/feeQuoter/FeeQuoter.getValidatedTokenPrice.t.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: BUSL-1.1 pragma solidity ^0.8.24; -import {MockV3Aggregator} from "../../../tests/MockV3Aggregator.sol"; +import {MockV3Aggregator} from "../../../shared/mocks/MockV3Aggregator.sol"; import {FeeQuoter} from "../../FeeQuoter.sol"; import {Internal} from "../../libraries/Internal.sol"; import {FeeQuoterSetup} from "./FeeQuoterSetup.t.sol"; diff --git a/contracts/src/v0.8/ccip/test/feeQuoter/FeeQuoterSetup.t.sol b/contracts/src/v0.8/ccip/test/feeQuoter/FeeQuoterSetup.t.sol index 7864d4080a2..e001ccd47cf 100644 --- a/contracts/src/v0.8/ccip/test/feeQuoter/FeeQuoterSetup.t.sol +++ b/contracts/src/v0.8/ccip/test/feeQuoter/FeeQuoterSetup.t.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: BUSL-1.1 pragma solidity ^0.8.24; -import {MockV3Aggregator} from "../../../tests/MockV3Aggregator.sol"; +import {MockV3Aggregator} from "../../../shared/mocks/MockV3Aggregator.sol"; import {FeeQuoter} from "../../FeeQuoter.sol"; import {Client} from "../../libraries/Client.sol"; import {Internal} from "../../libraries/Internal.sol"; diff --git 
a/contracts/src/v0.8/functions/tests/v1_X/Setup.t.sol b/contracts/src/v0.8/functions/tests/v1_X/Setup.t.sol index 444fc18fe81..f0069231e87 100644 --- a/contracts/src/v0.8/functions/tests/v1_X/Setup.t.sol +++ b/contracts/src/v0.8/functions/tests/v1_X/Setup.t.sol @@ -7,10 +7,10 @@ import {FunctionsRouterHarness, FunctionsRouter} from "./testhelpers/FunctionsRo import {FunctionsCoordinatorHarness} from "./testhelpers/FunctionsCoordinatorHarness.sol"; import {FunctionsBilling} from "../../dev/v1_X/FunctionsBilling.sol"; import {FunctionsResponse} from "../../dev/v1_X/libraries/FunctionsResponse.sol"; -import {MockV3Aggregator} from "../../../tests/MockV3Aggregator.sol"; +import {MockV3Aggregator} from "../../../shared/mocks/MockV3Aggregator.sol"; import {TermsOfServiceAllowList} from "../../dev/v1_X/accessControl/TermsOfServiceAllowList.sol"; import {TermsOfServiceAllowListConfig} from "../../dev/v1_X/accessControl/interfaces/ITermsOfServiceAllowList.sol"; -import {MockLinkToken} from "../../../mocks/MockLinkToken.sol"; +import {MockLinkToken} from "./testhelpers/MockLinkToken.sol"; import {FunctionsBillingConfig} from "../../dev/v1_X/interfaces/IFunctionsBilling.sol"; import "forge-std/Vm.sol"; diff --git a/contracts/src/v0.8/mocks/MockLinkToken.sol b/contracts/src/v0.8/functions/tests/v1_X/testhelpers/MockLinkToken.sol similarity index 94% rename from contracts/src/v0.8/mocks/MockLinkToken.sol rename to contracts/src/v0.8/functions/tests/v1_X/testhelpers/MockLinkToken.sol index a68f1b1d341..37ab5f50d56 100644 --- a/contracts/src/v0.8/mocks/MockLinkToken.sol +++ b/contracts/src/v0.8/functions/tests/v1_X/testhelpers/MockLinkToken.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; -import {IERC677Receiver} from "../shared/interfaces/IERC677Receiver.sol"; +import {IERC677Receiver} from "../../../../shared/interfaces/IERC677Receiver.sol"; contract MockLinkToken { uint256 private constant TOTAL_SUPPLY = 1_000_000_000 * 1e18; diff --git a/contracts/src/v0.8/interfaces/FeedRegistryInterface.sol b/contracts/src/v0.8/interfaces/FeedRegistryInterface.sol deleted file mode 100644 index 6e353a79263..00000000000 --- a/contracts/src/v0.8/interfaces/FeedRegistryInterface.sol +++ /dev/null @@ -1,124 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; -pragma abicoder v2; - -import {AggregatorV2V3Interface} from "../shared/interfaces/AggregatorV2V3Interface.sol"; - -// solhint-disable-next-line interface-starts-with-i -interface FeedRegistryInterface { - struct Phase { - uint16 phaseId; - uint80 startingAggregatorRoundId; - uint80 endingAggregatorRoundId; - } - - event FeedProposed( - address indexed asset, - address indexed denomination, - address indexed proposedAggregator, - address currentAggregator, - address sender - ); - event FeedConfirmed( - address indexed asset, - address indexed denomination, - address indexed latestAggregator, - address previousAggregator, - uint16 nextPhaseId, - address sender - ); - - // V3 AggregatorV3Interface - - function decimals(address base, address quote) external view returns (uint8); - - function description(address base, address quote) external view returns (string memory); - - function version(address base, address quote) external view returns (uint256); - - function latestRoundData( - address base, - address quote - ) external view returns (uint80 roundId, int256 answer, uint256 startedAt, uint256 updatedAt, uint80 answeredInRound); - - function getRoundData( - address base, - address quote, - uint80 _roundId - ) external view 
returns (uint80 roundId, int256 answer, uint256 startedAt, uint256 updatedAt, uint80 answeredInRound); - - // V2 AggregatorInterface - - function latestAnswer(address base, address quote) external view returns (int256 answer); - - function latestTimestamp(address base, address quote) external view returns (uint256 timestamp); - - function latestRound(address base, address quote) external view returns (uint256 roundId); - - function getAnswer(address base, address quote, uint256 roundId) external view returns (int256 answer); - - function getTimestamp(address base, address quote, uint256 roundId) external view returns (uint256 timestamp); - - // Registry getters - - function getFeed(address base, address quote) external view returns (AggregatorV2V3Interface aggregator); - - function getPhaseFeed( - address base, - address quote, - uint16 phaseId - ) external view returns (AggregatorV2V3Interface aggregator); - - function isFeedEnabled(address aggregator) external view returns (bool); - - function getPhase(address base, address quote, uint16 phaseId) external view returns (Phase memory phase); - - // Round helpers - - function getRoundFeed( - address base, - address quote, - uint80 roundId - ) external view returns (AggregatorV2V3Interface aggregator); - - function getPhaseRange( - address base, - address quote, - uint16 phaseId - ) external view returns (uint80 startingRoundId, uint80 endingRoundId); - - function getPreviousRoundId( - address base, - address quote, - uint80 roundId - ) external view returns (uint80 previousRoundId); - - function getNextRoundId(address base, address quote, uint80 roundId) external view returns (uint80 nextRoundId); - - // Feed management - - function proposeFeed(address base, address quote, address aggregator) external; - - function confirmFeed(address base, address quote, address aggregator) external; - - // Proposed aggregator - - function getProposedFeed( - address base, - address quote - ) external view returns (AggregatorV2V3Interface proposedAggregator); - - function proposedGetRoundData( - address base, - address quote, - uint80 roundId - ) external view returns (uint80 id, int256 answer, uint256 startedAt, uint256 updatedAt, uint80 answeredInRound); - - function proposedLatestRoundData( - address base, - address quote - ) external view returns (uint80 id, int256 answer, uint256 startedAt, uint256 updatedAt, uint80 answeredInRound); - - // Phases - function getCurrentPhaseId(address base, address quote) external view returns (uint16 currentPhaseId); -} diff --git a/contracts/src/v0.8/interfaces/FlagsInterface.sol b/contracts/src/v0.8/interfaces/FlagsInterface.sol deleted file mode 100644 index beb2b581e3f..00000000000 --- a/contracts/src/v0.8/interfaces/FlagsInterface.sol +++ /dev/null @@ -1,17 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -// solhint-disable-next-line interface-starts-with-i -interface FlagsInterface { - function getFlag(address) external view returns (bool); - - function getFlags(address[] calldata) external view returns (bool[] memory); - - function raiseFlag(address) external; - - function raiseFlags(address[] calldata) external; - - function lowerFlags(address[] calldata) external; - - function setRaisingAccessController(address) external; -} diff --git a/contracts/src/v0.8/interfaces/PoRAddressList.sol b/contracts/src/v0.8/interfaces/PoRAddressList.sol deleted file mode 100644 index af06e29a456..00000000000 --- a/contracts/src/v0.8/interfaces/PoRAddressList.sol +++ /dev/null @@ -1,29 +0,0 @@ -// 
SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -/** - * @title Chainlink Proof-of-Reserve address list interface. - * @notice This interface enables Chainlink nodes to get the list addresses to be used in a PoR feed. A single - * contract that implements this interface can only store an address list for a single PoR feed. - * @dev All functions in this interface are expected to be called off-chain, so gas usage is not a big concern. - * This makes it possible to store addresses in optimized data types and convert them to human-readable strings - * in `getPoRAddressList()`. - */ -// solhint-disable-next-line interface-starts-with-i -interface PoRAddressList { - /// @notice Get total number of addresses in the list. - function getPoRAddressListLength() external view returns (uint256); - - /** - * @notice Get a batch of human-readable addresses from the address list. The requested batch size can be greater - * than the actual address list size, in which the full address list will be returned. - * @dev Due to limitations of gas usage in off-chain calls, we need to support fetching the addresses in batches. - * EVM addresses need to be converted to human-readable strings. The address strings need to be in the same format - * that would be used when querying the balance of that address. - * @param startIndex The index of the first address in the batch. - * @param endIndex The index of the last address in the batch. If `endIndex > getPoRAddressListLength()-1`, - * endIndex need to default to `getPoRAddressListLength()-1`. - * @return Array of addresses as strings. - */ - function getPoRAddressList(uint256 startIndex, uint256 endIndex) external view returns (string[] memory); -} diff --git a/contracts/src/v0.8/interfaces/TypeAndVersionInterface.sol b/contracts/src/v0.8/interfaces/TypeAndVersionInterface.sol deleted file mode 100644 index 786f2750acf..00000000000 --- a/contracts/src/v0.8/interfaces/TypeAndVersionInterface.sol +++ /dev/null @@ -1,6 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -abstract contract TypeAndVersionInterface { - function typeAndVersion() external pure virtual returns (string memory); -} diff --git a/contracts/src/v0.8/tests/FeedConsumer.sol b/contracts/src/v0.8/l2ep/test/FeedConsumer.sol similarity index 92% rename from contracts/src/v0.8/tests/FeedConsumer.sol rename to contracts/src/v0.8/l2ep/test/FeedConsumer.sol index c9fc62357a6..f83781b5ac1 100644 --- a/contracts/src/v0.8/tests/FeedConsumer.sol +++ b/contracts/src/v0.8/l2ep/test/FeedConsumer.sol @@ -1,9 +1,10 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; -import {AggregatorV2V3Interface} from "../shared/interfaces/AggregatorV2V3Interface.sol"; +import {AggregatorV2V3Interface} from "../../shared/interfaces/AggregatorV2V3Interface.sol"; contract FeedConsumer { + // solhint-disable-next-line AggregatorV2V3Interface public immutable AGGREGATOR; constructor(address feedAddress) { diff --git a/contracts/src/v0.8/tests/Greeter.sol b/contracts/src/v0.8/l2ep/test/Greeter.sol similarity index 82% rename from contracts/src/v0.8/tests/Greeter.sol rename to contracts/src/v0.8/l2ep/test/Greeter.sol index 88ccca560de..313c7c5e3b0 100644 --- a/contracts/src/v0.8/tests/Greeter.sol +++ b/contracts/src/v0.8/l2ep/test/Greeter.sol @@ -1,7 +1,8 @@ pragma solidity ^0.8.0; -import "../shared/access/ConfirmedOwner.sol"; +import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; +// solhint-disable contract Greeter is ConfirmedOwner { string public greeting; diff --git 
a/contracts/src/v0.8/tests/MockArbitrumInbox.sol b/contracts/src/v0.8/l2ep/test/mocks/MockArbitrumInbox.sol similarity index 94% rename from contracts/src/v0.8/tests/MockArbitrumInbox.sol rename to contracts/src/v0.8/l2ep/test/mocks/MockArbitrumInbox.sol index 445a361b309..3ec76338b8c 100644 --- a/contracts/src/v0.8/tests/MockArbitrumInbox.sol +++ b/contracts/src/v0.8/l2ep/test/mocks/MockArbitrumInbox.sol @@ -1,8 +1,8 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.9; -import {IInbox} from "../vendor/arb-bridge-eth/v0.8.0-custom/contracts/bridge/interfaces/IInbox.sol"; -import {IBridge} from "../vendor/arb-bridge-eth/v0.8.0-custom/contracts/bridge/interfaces/IBridge.sol"; +import {IInbox} from "../../../vendor/arb-bridge-eth/v0.8.0-custom/contracts/bridge/interfaces/IInbox.sol"; +import {IBridge} from "../../../vendor/arb-bridge-eth/v0.8.0-custom/contracts/bridge/interfaces/IBridge.sol"; contract MockArbitrumInbox is IInbox { event RetryableTicketNoRefundAliasRewriteCreated( diff --git a/contracts/src/v0.8/tests/MockOptimismL1CrossDomainMessenger.sol b/contracts/src/v0.8/l2ep/test/mocks/MockOptimismL1CrossDomainMessenger.sol similarity index 100% rename from contracts/src/v0.8/tests/MockOptimismL1CrossDomainMessenger.sol rename to contracts/src/v0.8/l2ep/test/mocks/MockOptimismL1CrossDomainMessenger.sol diff --git a/contracts/src/v0.8/tests/MockOptimismL2CrossDomainMessenger.sol b/contracts/src/v0.8/l2ep/test/mocks/MockOptimismL2CrossDomainMessenger.sol similarity index 100% rename from contracts/src/v0.8/tests/MockOptimismL2CrossDomainMessenger.sol rename to contracts/src/v0.8/l2ep/test/mocks/MockOptimismL2CrossDomainMessenger.sol diff --git a/contracts/src/v0.8/l2ep/test/v1_0_0/L2EPTest.t.sol b/contracts/src/v0.8/l2ep/test/v1_0_0/L2EPTest.t.sol index 93640f4bcb4..0bd377a7cbf 100644 --- a/contracts/src/v0.8/l2ep/test/v1_0_0/L2EPTest.t.sol +++ b/contracts/src/v0.8/l2ep/test/v1_0_0/L2EPTest.t.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.24; -import {Greeter} from "../../../tests/Greeter.sol"; +import {Greeter} from "../Greeter.sol"; import {MultiSend} from "../../../vendor/MultiSend.sol"; import {Test} from "forge-std/Test.sol"; diff --git a/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumCrossDomainForwarder.t.sol b/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumCrossDomainForwarder.t.sol index e0a76a2b37a..62f7cd5651e 100644 --- a/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumCrossDomainForwarder.t.sol +++ b/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumCrossDomainForwarder.t.sol @@ -2,7 +2,7 @@ pragma solidity 0.8.24; import {ArbitrumCrossDomainForwarder} from "../../../arbitrum/ArbitrumCrossDomainForwarder.sol"; -import {Greeter} from "../../../../tests/Greeter.sol"; +import {Greeter} from "../../Greeter.sol"; import {L2EPTest} from "../L2EPTest.t.sol"; contract ArbitrumCrossDomainForwarderTest is L2EPTest { diff --git a/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumCrossDomainGovernor.t.sol b/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumCrossDomainGovernor.t.sol index 746da3d1cef..45f67d52ccd 100644 --- a/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumCrossDomainGovernor.t.sol +++ b/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumCrossDomainGovernor.t.sol @@ -2,7 +2,7 @@ pragma solidity 0.8.24; import {ArbitrumCrossDomainGovernor} from "../../../arbitrum/ArbitrumCrossDomainGovernor.sol"; -import {Greeter} from "../../../../tests/Greeter.sol"; +import {Greeter} from "../../Greeter.sol"; import 
{L2EPTest} from "../L2EPTest.t.sol"; import {MultiSend} from "../../../../vendor/MultiSend.sol"; diff --git a/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumSequencerUptimeFeed.t.sol b/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumSequencerUptimeFeed.t.sol index deaa81977b7..1474b680ec2 100644 --- a/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumSequencerUptimeFeed.t.sol +++ b/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumSequencerUptimeFeed.t.sol @@ -4,7 +4,7 @@ pragma solidity 0.8.24; import {SimpleWriteAccessController} from "../../../../shared/access/SimpleWriteAccessController.sol"; import {ArbitrumSequencerUptimeFeed} from "../../../arbitrum/ArbitrumSequencerUptimeFeed.sol"; import {MockAggregatorV2V3} from "../../mocks/MockAggregatorV2V3.sol"; -import {FeedConsumer} from "../../../../tests/FeedConsumer.sol"; +import {FeedConsumer} from "../../FeedConsumer.sol"; import {Flags} from "../../../Flags.sol"; import {L2EPTest} from "../L2EPTest.t.sol"; diff --git a/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumValidator.t.sol b/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumValidator.t.sol index 95278e644b1..7497ae198e2 100644 --- a/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumValidator.t.sol +++ b/contracts/src/v0.8/l2ep/test/v1_0_0/arbitrum/ArbitrumValidator.t.sol @@ -6,7 +6,7 @@ import {AccessControllerInterface} from "../../../../shared/interfaces/AccessCon import {SimpleWriteAccessController} from "../../../../shared/access/SimpleWriteAccessController.sol"; import {ArbitrumSequencerUptimeFeed} from "../../../arbitrum/ArbitrumSequencerUptimeFeed.sol"; import {ArbitrumValidator} from "../../../arbitrum/ArbitrumValidator.sol"; -import {MockArbitrumInbox} from "../../../../tests/MockArbitrumInbox.sol"; +import {MockArbitrumInbox} from "../../mocks/MockArbitrumInbox.sol"; import {MockAggregatorV2V3} from "../../mocks/MockAggregatorV2V3.sol"; import {L2EPTest} from "../L2EPTest.t.sol"; diff --git a/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismCrossDomainForwarder.t.sol b/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismCrossDomainForwarder.t.sol index 28d70fa35a5..5562b413e3b 100644 --- a/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismCrossDomainForwarder.t.sol +++ b/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismCrossDomainForwarder.t.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.24; import {OptimismCrossDomainForwarder} from "../../../optimism/OptimismCrossDomainForwarder.sol"; import {MockOVMCrossDomainMessenger} from "../../mocks/optimism/MockOVMCrossDomainMessenger.sol"; -import {Greeter} from "../../../../tests/Greeter.sol"; +import {Greeter} from "../../Greeter.sol"; import {L2EPTest} from "../L2EPTest.t.sol"; contract OptimismCrossDomainForwarderTest is L2EPTest { diff --git a/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismCrossDomainGovernor.t.sol b/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismCrossDomainGovernor.t.sol index 57f79124512..3328a89b89d 100644 --- a/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismCrossDomainGovernor.t.sol +++ b/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismCrossDomainGovernor.t.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.24; import {OptimismCrossDomainGovernor} from "../../../optimism/OptimismCrossDomainGovernor.sol"; import {MockOVMCrossDomainMessenger} from "../../mocks/optimism/MockOVMCrossDomainMessenger.sol"; -import {Greeter} from "../../../../tests/Greeter.sol"; +import {Greeter} from "../../Greeter.sol"; import {L2EPTest} from "../L2EPTest.t.sol"; import 
{MultiSend} from "../../../../vendor/MultiSend.sol"; diff --git a/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismSequencerUptimeFeed.t.sol b/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismSequencerUptimeFeed.t.sol index 34010c313e8..393da70d79a 100644 --- a/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismSequencerUptimeFeed.t.sol +++ b/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismSequencerUptimeFeed.t.sol @@ -1,8 +1,8 @@ // SPDX-License-Identifier: MIT pragma solidity 0.8.24; -import {MockOptimismL1CrossDomainMessenger} from "../../../../tests/MockOptimismL1CrossDomainMessenger.sol"; -import {MockOptimismL2CrossDomainMessenger} from "../../../../tests/MockOptimismL2CrossDomainMessenger.sol"; +import {MockOptimismL1CrossDomainMessenger} from "../../mocks/MockOptimismL1CrossDomainMessenger.sol"; +import {MockOptimismL2CrossDomainMessenger} from "../../mocks/MockOptimismL2CrossDomainMessenger.sol"; import {OptimismSequencerUptimeFeed} from "../../../optimism/OptimismSequencerUptimeFeed.sol"; import {BaseSequencerUptimeFeed} from "../../../base/BaseSequencerUptimeFeed.sol"; import {L2EPTest} from "../L2EPTest.t.sol"; diff --git a/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismValidator.t.sol b/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismValidator.t.sol index 48ff1f7778d..6fb00e708c5 100644 --- a/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismValidator.t.sol +++ b/contracts/src/v0.8/l2ep/test/v1_0_0/optimism/OptimismValidator.t.sol @@ -3,8 +3,8 @@ pragma solidity 0.8.24; import {ISequencerUptimeFeed} from "../../../interfaces/ISequencerUptimeFeed.sol"; -import {MockOptimismL1CrossDomainMessenger} from "../../../../tests/MockOptimismL1CrossDomainMessenger.sol"; -import {MockOptimismL2CrossDomainMessenger} from "../../../../tests/MockOptimismL2CrossDomainMessenger.sol"; +import {MockOptimismL1CrossDomainMessenger} from "../../mocks/MockOptimismL1CrossDomainMessenger.sol"; +import {MockOptimismL2CrossDomainMessenger} from "../../mocks/MockOptimismL2CrossDomainMessenger.sol"; import {OptimismSequencerUptimeFeed} from "../../../optimism/OptimismSequencerUptimeFeed.sol"; import {OptimismValidator} from "../../../optimism/OptimismValidator.sol"; import {L2EPTest} from "../L2EPTest.t.sol"; diff --git a/contracts/src/v0.8/l2ep/test/v1_0_0/scroll/ScrollCrossDomainForwarder.t.sol b/contracts/src/v0.8/l2ep/test/v1_0_0/scroll/ScrollCrossDomainForwarder.t.sol index 0025c6b9937..d28df02e975 100644 --- a/contracts/src/v0.8/l2ep/test/v1_0_0/scroll/ScrollCrossDomainForwarder.t.sol +++ b/contracts/src/v0.8/l2ep/test/v1_0_0/scroll/ScrollCrossDomainForwarder.t.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.24; import {MockScrollCrossDomainMessenger} from "../../mocks/scroll/MockScrollCrossDomainMessenger.sol"; import {ScrollCrossDomainForwarder} from "../../../scroll/ScrollCrossDomainForwarder.sol"; -import {Greeter} from "../../../../tests/Greeter.sol"; +import {Greeter} from "../../Greeter.sol"; import {L2EPTest} from "../L2EPTest.t.sol"; contract ScrollCrossDomainForwarderTest is L2EPTest { diff --git a/contracts/src/v0.8/l2ep/test/v1_0_0/scroll/ScrollCrossDomainGovernor.t.sol b/contracts/src/v0.8/l2ep/test/v1_0_0/scroll/ScrollCrossDomainGovernor.t.sol index a2523e5feb6..544923f49f5 100644 --- a/contracts/src/v0.8/l2ep/test/v1_0_0/scroll/ScrollCrossDomainGovernor.t.sol +++ b/contracts/src/v0.8/l2ep/test/v1_0_0/scroll/ScrollCrossDomainGovernor.t.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.24; import {MockScrollCrossDomainMessenger} from 
"../../mocks/scroll/MockScrollCrossDomainMessenger.sol"; import {ScrollCrossDomainGovernor} from "../../../scroll/ScrollCrossDomainGovernor.sol"; -import {Greeter} from "../../../../tests/Greeter.sol"; +import {Greeter} from "../../Greeter.sol"; import {L2EPTest} from "../L2EPTest.t.sol"; import {MultiSend} from "../../../../vendor/MultiSend.sol"; diff --git a/contracts/src/v0.8/l2ep/test/v1_0_0/shared/BaseSequencerUptimeFeed.t.sol b/contracts/src/v0.8/l2ep/test/v1_0_0/shared/BaseSequencerUptimeFeed.t.sol index 20553e33bab..367aa00a620 100644 --- a/contracts/src/v0.8/l2ep/test/v1_0_0/shared/BaseSequencerUptimeFeed.t.sol +++ b/contracts/src/v0.8/l2ep/test/v1_0_0/shared/BaseSequencerUptimeFeed.t.sol @@ -5,7 +5,7 @@ import {Vm} from "forge-std/Test.sol"; import {AddressAliasHelper} from "../../../../vendor/arb-bridge-eth/v0.8.0-custom/contracts/libraries/AddressAliasHelper.sol"; import {BaseSequencerUptimeFeed} from "../../../base/BaseSequencerUptimeFeed.sol"; import {MockBaseSequencerUptimeFeed} from "../../../test/mocks/MockBaseSequencerUptimeFeed.sol"; -import {FeedConsumer} from "../../../../tests/FeedConsumer.sol"; +import {FeedConsumer} from "../../FeedConsumer.sol"; import {L2EPTest} from "../L2EPTest.t.sol"; contract BaseSequencerUptimeFeed_Setup is L2EPTest { diff --git a/contracts/src/v0.8/llo-feeds/v0.3.0/FeeManager.sol b/contracts/src/v0.8/llo-feeds/v0.3.0/FeeManager.sol index 44f550e3253..71f2f50fcb8 100644 --- a/contracts/src/v0.8/llo-feeds/v0.3.0/FeeManager.sol +++ b/contracts/src/v0.8/llo-feeds/v0.3.0/FeeManager.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.19; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; import {IFeeManager} from "./interfaces/IFeeManager.sol"; -import {TypeAndVersionInterface} from "../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; import {IERC165} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/interfaces/IERC165.sol"; import {Common} from "../libraries/Common.sol"; import {IRewardManager} from "./interfaces/IRewardManager.sol"; @@ -19,7 +19,7 @@ import {IVerifierFeeManager} from "./interfaces/IVerifierFeeManager.sol"; * @author Austin Born * @notice This contract is used for the handling of fees required for users verifying reports. 
*/ -contract FeeManager is IFeeManager, ConfirmedOwner, TypeAndVersionInterface { +contract FeeManager is IFeeManager, ConfirmedOwner, ITypeAndVersion { using SafeERC20 for IERC20; /// @notice list of subscribers and their discounts subscriberDiscounts[subscriber][feedId][token] @@ -158,7 +158,7 @@ contract FeeManager is IFeeManager, ConfirmedOwner, TypeAndVersionInterface { _; } - /// @inheritdoc TypeAndVersionInterface + /// @inheritdoc ITypeAndVersion function typeAndVersion() external pure override returns (string memory) { return "FeeManager 2.0.0"; } diff --git a/contracts/src/v0.8/llo-feeds/v0.3.0/RewardManager.sol b/contracts/src/v0.8/llo-feeds/v0.3.0/RewardManager.sol index 49fef51c569..9e9a58857c7 100644 --- a/contracts/src/v0.8/llo-feeds/v0.3.0/RewardManager.sol +++ b/contracts/src/v0.8/llo-feeds/v0.3.0/RewardManager.sol @@ -4,7 +4,7 @@ pragma solidity 0.8.19; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; import {IRewardManager} from "./interfaces/IRewardManager.sol"; import {IERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/interfaces/IERC20.sol"; -import {TypeAndVersionInterface} from "../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; import {Common} from "../libraries/Common.sol"; import {SafeERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/utils/SafeERC20.sol"; @@ -14,7 +14,7 @@ import {SafeERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/tok * @author Austin Born * @notice This contract will be used to reward any configured recipients within a pool. Recipients will receive a share of their pool relative to their configured weight. */ -contract RewardManager is IRewardManager, ConfirmedOwner, TypeAndVersionInterface { +contract RewardManager is IRewardManager, ConfirmedOwner, ITypeAndVersion { using SafeERC20 for IERC20; // @dev The mapping of total fees collected for a particular pot: s_totalRewardRecipientFees[poolId] @@ -73,7 +73,7 @@ contract RewardManager is IRewardManager, ConfirmedOwner, TypeAndVersionInterfac i_linkAddress = linkAddress; } - // @inheritdoc TypeAndVersionInterface + // @inheritdoc ITypeAndVersion function typeAndVersion() external pure override returns (string memory) { return "RewardManager 1.1.0"; } diff --git a/contracts/src/v0.8/llo-feeds/v0.3.0/Verifier.sol b/contracts/src/v0.8/llo-feeds/v0.3.0/Verifier.sol index fe5742108a5..ce4fe974bd9 100644 --- a/contracts/src/v0.8/llo-feeds/v0.3.0/Verifier.sol +++ b/contracts/src/v0.8/llo-feeds/v0.3.0/Verifier.sol @@ -4,7 +4,7 @@ pragma solidity 0.8.19; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; import {IVerifier} from "./interfaces/IVerifier.sol"; import {IVerifierProxy} from "./interfaces/IVerifierProxy.sol"; -import {TypeAndVersionInterface} from "../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; import {IERC165} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/interfaces/IERC165.sol"; import {Common} from "../libraries/Common.sol"; @@ -18,7 +18,7 @@ uint256 constant MAX_NUM_ORACLES = 31; * a feed. The verifier contract is used to verify that such reports have * been signed by the correct signers. 
**/ -contract Verifier is IVerifier, ConfirmedOwner, TypeAndVersionInterface { +contract Verifier is IVerifier, ConfirmedOwner, ITypeAndVersion { // The first byte of the mask can be 0, because we only ever have 31 oracles uint256 internal constant ORACLE_MASK = 0x0001010101010101010101010101010101010101010101010101010101010101; @@ -193,7 +193,7 @@ contract Verifier is IVerifier, ConfirmedOwner, TypeAndVersionInterface { return interfaceId == this.verify.selector; } - /// @inheritdoc TypeAndVersionInterface + /// @inheritdoc ITypeAndVersion function typeAndVersion() external pure override returns (string memory) { return "Verifier 1.2.0"; } diff --git a/contracts/src/v0.8/llo-feeds/v0.3.0/VerifierProxy.sol b/contracts/src/v0.8/llo-feeds/v0.3.0/VerifierProxy.sol index c06312dd7be..e66b937f153 100644 --- a/contracts/src/v0.8/llo-feeds/v0.3.0/VerifierProxy.sol +++ b/contracts/src/v0.8/llo-feeds/v0.3.0/VerifierProxy.sol @@ -4,7 +4,7 @@ pragma solidity 0.8.19; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; import {IVerifierProxy} from "./interfaces/IVerifierProxy.sol"; import {IVerifier} from "./interfaces/IVerifier.sol"; -import {TypeAndVersionInterface} from "../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; import {AccessControllerInterface} from "../../shared/interfaces/AccessControllerInterface.sol"; import {IERC165} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/interfaces/IERC165.sol"; import {IVerifierFeeManager} from "./interfaces/IVerifierFeeManager.sol"; @@ -15,7 +15,7 @@ import {Common} from "../libraries/Common.sol"; * on a chain. It is responsible for taking in a verification request and routing * it to the correct verifier contract. */ -contract VerifierProxy is IVerifierProxy, ConfirmedOwner, TypeAndVersionInterface { +contract VerifierProxy is IVerifierProxy, ConfirmedOwner, ITypeAndVersion { /// @notice This event is emitted whenever a new verifier contract is set /// @param oldConfigDigest The config digest that was previously the latest config /// digest of the verifier contract at the verifier address. 
@@ -115,7 +115,7 @@ contract VerifierProxy is IVerifierProxy, ConfirmedOwner, TypeAndVersionInterfac _; } - /// @inheritdoc TypeAndVersionInterface + /// @inheritdoc ITypeAndVersion function typeAndVersion() external pure override returns (string memory) { return "VerifierProxy 2.0.0"; } diff --git a/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationFeeManager.sol b/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationFeeManager.sol index 08ac1d45f58..eb35cdc7956 100644 --- a/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationFeeManager.sol +++ b/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationFeeManager.sol @@ -2,7 +2,7 @@ pragma solidity 0.8.19; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; -import {TypeAndVersionInterface} from "../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; import {IERC165} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/interfaces/IERC165.sol"; import {Common} from "../libraries/Common.sol"; import {IWERC20} from "../../shared/interfaces/IWERC20.sol"; @@ -23,7 +23,7 @@ contract DestinationFeeManager is IDestinationFeeManager, IDestinationVerifierFeeManager, ConfirmedOwner, - TypeAndVersionInterface + ITypeAndVersion { using SafeERC20 for IERC20; @@ -164,7 +164,7 @@ contract DestinationFeeManager is _; } - /// @inheritdoc TypeAndVersionInterface + /// @inheritdoc ITypeAndVersion function typeAndVersion() external pure override returns (string memory) { return "DestinationFeeManager 0.4.0"; } diff --git a/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationRewardManager.sol b/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationRewardManager.sol index 4b4c1f50efd..9f66a423cb6 100644 --- a/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationRewardManager.sol +++ b/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationRewardManager.sol @@ -4,7 +4,7 @@ pragma solidity 0.8.19; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; import {IDestinationRewardManager} from "./interfaces/IDestinationRewardManager.sol"; import {IERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/interfaces/IERC20.sol"; -import {TypeAndVersionInterface} from "../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; import {Common} from "../libraries/Common.sol"; import {SafeERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/token/ERC20/utils/SafeERC20.sol"; @@ -14,7 +14,7 @@ import {SafeERC20} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/tok * @author Austin Born * @notice This contract will be used to reward any configured recipients within a pool. Recipients will receive a share of their pool relative to their configured weight. 
*/ -contract DestinationRewardManager is IDestinationRewardManager, ConfirmedOwner, TypeAndVersionInterface { +contract DestinationRewardManager is IDestinationRewardManager, ConfirmedOwner, ITypeAndVersion { using SafeERC20 for IERC20; // @dev The mapping of total fees collected for a particular pot: s_totalRewardRecipientFees[poolId] @@ -73,7 +73,7 @@ contract DestinationRewardManager is IDestinationRewardManager, ConfirmedOwner, i_linkAddress = linkAddress; } - // @inheritdoc TypeAndVersionInterface + // @inheritdoc ITypeAndVersion function typeAndVersion() external pure override returns (string memory) { return "DestinationRewardManager 0.4.0"; } diff --git a/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationVerifier.sol b/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationVerifier.sol index 8ab0f6acc23..545a0d60727 100644 --- a/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationVerifier.sol +++ b/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationVerifier.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.19; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; import {IDestinationVerifier} from "./interfaces/IDestinationVerifier.sol"; -import {TypeAndVersionInterface} from "../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; import {IERC165} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/interfaces/IERC165.sol"; import {Common} from "../libraries/Common.sol"; import {IAccessController} from "../../shared/interfaces/IAccessController.sol"; @@ -23,7 +23,7 @@ contract DestinationVerifier is IDestinationVerifier, IDestinationVerifierProxyVerifier, ConfirmedOwner, - TypeAndVersionInterface + ITypeAndVersion { /// @notice The list of DON configurations by hash(address|donConfigId) - set to true if the signer is part of the config mapping(bytes32 => bool) private s_signerByAddressAndDonConfigId; @@ -436,7 +436,7 @@ contract DestinationVerifier is interfaceId == type(IDestinationVerifierProxyVerifier).interfaceId; } - /// @inheritdoc TypeAndVersionInterface + /// @inheritdoc ITypeAndVersion function typeAndVersion() external pure override returns (string memory) { return "DestinationVerifier 0.4.0"; } diff --git a/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationVerifierProxy.sol b/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationVerifierProxy.sol index 6790883ba31..6a16dc20cb3 100644 --- a/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationVerifierProxy.sol +++ b/contracts/src/v0.8/llo-feeds/v0.4.0/DestinationVerifierProxy.sol @@ -2,7 +2,7 @@ pragma solidity 0.8.19; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; -import {TypeAndVersionInterface} from "../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; import {IERC165} from "../../vendor/openzeppelin-solidity/v4.8.3/contracts/interfaces/IERC165.sol"; import {IDestinationVerifierProxy} from "./interfaces/IDestinationVerifierProxy.sol"; import {IDestinationVerifierProxyVerifier} from "./interfaces/IDestinationVerifierProxyVerifier.sol"; @@ -12,7 +12,7 @@ import {IDestinationVerifierProxyVerifier} from "./interfaces/IDestinationVerifi * @author Michael Fletcher * @notice This contract will be used to route all requests through to the assigned verifier contract. This contract does not support individual feed configurations and is aimed at being a simple proxy for the verifier contract on any destination chain. 
*/ -contract DestinationVerifierProxy is IDestinationVerifierProxy, ConfirmedOwner, TypeAndVersionInterface { +contract DestinationVerifierProxy is IDestinationVerifierProxy, ConfirmedOwner, ITypeAndVersion { /// @notice The active verifier for this proxy IDestinationVerifierProxyVerifier private s_verifier; @@ -24,7 +24,7 @@ contract DestinationVerifierProxy is IDestinationVerifierProxy, ConfirmedOwner, constructor() ConfirmedOwner(msg.sender) {} - /// @inheritdoc TypeAndVersionInterface + /// @inheritdoc ITypeAndVersion function typeAndVersion() external pure override returns (string memory) { return "DestinationVerifierProxy 0.4.0"; } diff --git a/contracts/src/v0.8/llo-feeds/v0.5.0/configuration/ChannelConfigStore.sol b/contracts/src/v0.8/llo-feeds/v0.5.0/configuration/ChannelConfigStore.sol index f5e5040bb8f..465292d9e0c 100644 --- a/contracts/src/v0.8/llo-feeds/v0.5.0/configuration/ChannelConfigStore.sol +++ b/contracts/src/v0.8/llo-feeds/v0.5.0/configuration/ChannelConfigStore.sol @@ -3,9 +3,9 @@ pragma solidity ^0.8.19; import {ConfirmedOwner} from "../../../shared/access/ConfirmedOwner.sol"; import {IChannelConfigStore} from "./interfaces/IChannelConfigStore.sol"; -import {TypeAndVersionInterface} from "../../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../../shared/interfaces/ITypeAndVersion.sol"; -contract ChannelConfigStore is ConfirmedOwner, IChannelConfigStore, TypeAndVersionInterface { +contract ChannelConfigStore is ConfirmedOwner, IChannelConfigStore, ITypeAndVersion { event NewChannelDefinition(uint256 indexed donId, uint32 version, string url, bytes32 sha); constructor() ConfirmedOwner(msg.sender) {} diff --git a/contracts/src/v0.8/llo-feeds/v0.5.0/configuration/Configurator.sol b/contracts/src/v0.8/llo-feeds/v0.5.0/configuration/Configurator.sol index c946b3e2508..9b72f3d4fec 100644 --- a/contracts/src/v0.8/llo-feeds/v0.5.0/configuration/Configurator.sol +++ b/contracts/src/v0.8/llo-feeds/v0.5.0/configuration/Configurator.sol @@ -2,7 +2,7 @@ pragma solidity 0.8.19; import {ConfirmedOwner} from "../../../shared/access/ConfirmedOwner.sol"; -import {TypeAndVersionInterface} from "../../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../../shared/interfaces/ITypeAndVersion.sol"; import {IERC165} from "../../../vendor/openzeppelin-solidity/v4.8.3/contracts/interfaces/IERC165.sol"; import {IConfigurator} from "./interfaces/IConfigurator.sol"; @@ -18,7 +18,7 @@ uint256 constant MIN_SUPPORTED_ONCHAIN_CONFIG_VERSION = 1; * @notice This contract is intended to be deployed on the source chain and acts as a OCR3 configurator for LLO/Mercury **/ -contract Configurator is IConfigurator, ConfirmedOwner, TypeAndVersionInterface, IERC165 { +contract Configurator is IConfigurator, ConfirmedOwner, ITypeAndVersion, IERC165 { /// @notice This error is thrown whenever trying to set a config /// with a fault tolerance of 0 error FaultToleranceMustBePositive(); @@ -334,7 +334,7 @@ contract Configurator is IConfigurator, ConfirmedOwner, TypeAndVersionInterface, return interfaceId == type(IConfigurator).interfaceId; } - /// @inheritdoc TypeAndVersionInterface + /// @inheritdoc ITypeAndVersion function typeAndVersion() external pure override returns (string memory) { return "Configurator 0.5.0"; } diff --git a/contracts/src/v0.8/mocks/MockAggregatorValidator.sol b/contracts/src/v0.8/mocks/MockAggregatorValidator.sol deleted file mode 100644 index bdc935cd231..00000000000 --- a/contracts/src/v0.8/mocks/MockAggregatorValidator.sol +++ 
/dev/null @@ -1,30 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -import "../shared/interfaces/AggregatorValidatorInterface.sol"; - -contract MockAggregatorValidator is AggregatorValidatorInterface { - uint8 immutable id; - - constructor(uint8 id_) { - id = id_; - } - - event ValidateCalled( - uint8 id, - uint256 previousRoundId, - int256 previousAnswer, - uint256 currentRoundId, - int256 currentAnswer - ); - - function validate( - uint256 previousRoundId, - int256 previousAnswer, - uint256 currentRoundId, - int256 currentAnswer - ) external override returns (bool) { - emit ValidateCalled(id, previousRoundId, previousAnswer, currentRoundId, currentAnswer); - return true; - } -} diff --git a/contracts/src/v0.8/mocks/MockOffchainAggregator.sol b/contracts/src/v0.8/mocks/MockOffchainAggregator.sol deleted file mode 100644 index 5366bbee0b0..00000000000 --- a/contracts/src/v0.8/mocks/MockOffchainAggregator.sol +++ /dev/null @@ -1,14 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity 0.8.6; - -contract MockOffchainAggregator { - event RoundIdUpdated(uint80 roundId); - - uint80 public roundId; - - function requestNewRound() external returns (uint80) { - roundId++; - emit RoundIdUpdated(roundId); - return roundId; - } -} diff --git a/contracts/src/v0.8/Chainlink.sol b/contracts/src/v0.8/operatorforwarder/Chainlink.sol similarity index 96% rename from contracts/src/v0.8/Chainlink.sol rename to contracts/src/v0.8/operatorforwarder/Chainlink.sol index e511cfc8085..f3ee84cb11e 100644 --- a/contracts/src/v0.8/Chainlink.sol +++ b/contracts/src/v0.8/operatorforwarder/Chainlink.sol @@ -1,8 +1,8 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; -import {CBORChainlink} from "./vendor/CBORChainlink.sol"; -import {BufferChainlink} from "./vendor/BufferChainlink.sol"; +import {CBORChainlink} from "../vendor/CBORChainlink.sol"; +import {BufferChainlink} from "../vendor/BufferChainlink.sol"; /** * @title Library for common Chainlink functions diff --git a/contracts/src/v0.8/ChainlinkClient.sol b/contracts/src/v0.8/operatorforwarder/ChainlinkClient.sol similarity index 98% rename from contracts/src/v0.8/ChainlinkClient.sol rename to contracts/src/v0.8/operatorforwarder/ChainlinkClient.sol index 1d8640a27b2..c619683cbb1 100644 --- a/contracts/src/v0.8/ChainlinkClient.sol +++ b/contracts/src/v0.8/operatorforwarder/ChainlinkClient.sol @@ -3,11 +3,11 @@ pragma solidity ^0.8.0; import {Chainlink} from "./Chainlink.sol"; import {ENSInterface} from "./interfaces/ENSInterface.sol"; -import {LinkTokenInterface} from "./shared/interfaces/LinkTokenInterface.sol"; +import {LinkTokenInterface} from "../shared/interfaces/LinkTokenInterface.sol"; import {ChainlinkRequestInterface} from "./interfaces/ChainlinkRequestInterface.sol"; import {OperatorInterface} from "./interfaces/OperatorInterface.sol"; import {PointerInterface} from "./interfaces/PointerInterface.sol"; -import {ENSResolver as ENSResolver_Chainlink} from "./vendor/ENSResolver.sol"; +import {ENSResolver as ENSResolver_Chainlink} from "../vendor/ENSResolver.sol"; /** * @title The ChainlinkClient contract diff --git a/contracts/src/v0.8/operatorforwarder/Operator.sol b/contracts/src/v0.8/operatorforwarder/Operator.sol index 64882e43cda..ff22558a098 100644 --- a/contracts/src/v0.8/operatorforwarder/Operator.sol +++ b/contracts/src/v0.8/operatorforwarder/Operator.sol @@ -6,10 +6,10 @@ import {LinkTokenReceiver} from "./LinkTokenReceiver.sol"; import {ConfirmedOwner} from "../shared/access/ConfirmedOwner.sol"; import 
{LinkTokenInterface} from "../shared/interfaces/LinkTokenInterface.sol"; import {IAuthorizedReceiver} from "./interfaces/IAuthorizedReceiver.sol"; -import {OperatorInterface} from "../interfaces/OperatorInterface.sol"; +import {OperatorInterface} from "./interfaces/OperatorInterface.sol"; import {IOwnable} from "../shared/interfaces/IOwnable.sol"; import {IWithdrawal} from "./interfaces/IWithdrawal.sol"; -import {OracleInterface} from "../interfaces/OracleInterface.sol"; +import {OracleInterface} from "./interfaces/OracleInterface.sol"; import {SafeCast} from "../vendor/openzeppelin-solidity/v4.8.3/contracts/utils/math/SafeCast.sol"; // @title The Chainlink Operator contract diff --git a/contracts/src/v0.8/interfaces/ChainlinkRequestInterface.sol b/contracts/src/v0.8/operatorforwarder/interfaces/ChainlinkRequestInterface.sol similarity index 100% rename from contracts/src/v0.8/interfaces/ChainlinkRequestInterface.sol rename to contracts/src/v0.8/operatorforwarder/interfaces/ChainlinkRequestInterface.sol diff --git a/contracts/src/v0.8/interfaces/ENSInterface.sol b/contracts/src/v0.8/operatorforwarder/interfaces/ENSInterface.sol similarity index 100% rename from contracts/src/v0.8/interfaces/ENSInterface.sol rename to contracts/src/v0.8/operatorforwarder/interfaces/ENSInterface.sol diff --git a/contracts/src/v0.8/interfaces/OperatorInterface.sol b/contracts/src/v0.8/operatorforwarder/interfaces/OperatorInterface.sol similarity index 100% rename from contracts/src/v0.8/interfaces/OperatorInterface.sol rename to contracts/src/v0.8/operatorforwarder/interfaces/OperatorInterface.sol diff --git a/contracts/src/v0.8/interfaces/OracleInterface.sol b/contracts/src/v0.8/operatorforwarder/interfaces/OracleInterface.sol similarity index 100% rename from contracts/src/v0.8/interfaces/OracleInterface.sol rename to contracts/src/v0.8/operatorforwarder/interfaces/OracleInterface.sol diff --git a/contracts/src/v0.8/interfaces/PointerInterface.sol b/contracts/src/v0.8/operatorforwarder/interfaces/PointerInterface.sol similarity index 100% rename from contracts/src/v0.8/interfaces/PointerInterface.sol rename to contracts/src/v0.8/operatorforwarder/interfaces/PointerInterface.sol diff --git a/contracts/src/v0.8/tests/Broken.sol b/contracts/src/v0.8/operatorforwarder/test/Broken.sol similarity index 95% rename from contracts/src/v0.8/tests/Broken.sol rename to contracts/src/v0.8/operatorforwarder/test/Broken.sol index 21fa9b014e9..6edfbd88d51 100644 --- a/contracts/src/v0.8/tests/Broken.sol +++ b/contracts/src/v0.8/operatorforwarder/test/Broken.sol @@ -1,6 +1,7 @@ pragma solidity ^0.8.0; // Broken is a contract to aid debugging and testing reverting calls during development. 
+// solhint-disable contract Broken { error Unauthorized(string reason, int256 reason2); diff --git a/contracts/src/v0.8/operatorforwarder/test/testhelpers/ChainlinkClientHelper.sol b/contracts/src/v0.8/operatorforwarder/test/testhelpers/ChainlinkClientHelper.sol index 9b6ba6bb432..1efd93114d9 100644 --- a/contracts/src/v0.8/operatorforwarder/test/testhelpers/ChainlinkClientHelper.sol +++ b/contracts/src/v0.8/operatorforwarder/test/testhelpers/ChainlinkClientHelper.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; -import {ChainlinkClient} from "../../../ChainlinkClient.sol"; +import {ChainlinkClient} from "../../ChainlinkClient.sol"; contract ChainlinkClientHelper is ChainlinkClient { bytes4 public constant FULFILL_SELECTOR = this.fulfill.selector; diff --git a/contracts/src/v0.8/operatorforwarder/test/testhelpers/Chainlinked.sol b/contracts/src/v0.8/operatorforwarder/test/testhelpers/Chainlinked.sol index dba5d407623..67fda6452cd 100644 --- a/contracts/src/v0.8/operatorforwarder/test/testhelpers/Chainlinked.sol +++ b/contracts/src/v0.8/operatorforwarder/test/testhelpers/Chainlinked.sol @@ -1,6 +1,6 @@ pragma solidity ^0.8.0; -import {ChainlinkClient, Chainlink} from "../../../ChainlinkClient.sol"; +import {ChainlinkClient, Chainlink} from "../../ChainlinkClient.sol"; /** * @title The Chainlinked contract diff --git a/contracts/src/v0.8/operatorforwarder/test/testhelpers/Consumer.sol b/contracts/src/v0.8/operatorforwarder/test/testhelpers/Consumer.sol index 3ec32dd6a29..b422081084e 100644 --- a/contracts/src/v0.8/operatorforwarder/test/testhelpers/Consumer.sol +++ b/contracts/src/v0.8/operatorforwarder/test/testhelpers/Consumer.sol @@ -1,8 +1,8 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; -import {ChainlinkClient, ChainlinkRequestInterface, LinkTokenInterface} from "../../../ChainlinkClient.sol"; -import {Chainlink} from "../../../Chainlink.sol"; +import {ChainlinkClient, ChainlinkRequestInterface, LinkTokenInterface} from "../../ChainlinkClient.sol"; +import {Chainlink} from "../../Chainlink.sol"; contract Consumer is ChainlinkClient { using Chainlink for Chainlink.Request; diff --git a/contracts/src/v0.8/operatorforwarder/test/testhelpers/EmptyOracle.sol b/contracts/src/v0.8/operatorforwarder/test/testhelpers/EmptyOracle.sol index f278791d2bb..6a4c281995a 100644 --- a/contracts/src/v0.8/operatorforwarder/test/testhelpers/EmptyOracle.sol +++ b/contracts/src/v0.8/operatorforwarder/test/testhelpers/EmptyOracle.sol @@ -1,8 +1,8 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; -import {ChainlinkRequestInterface} from "../../../interfaces/ChainlinkRequestInterface.sol"; -import {OracleInterface} from "../../../interfaces/OracleInterface.sol"; +import {ChainlinkRequestInterface} from "../../interfaces/ChainlinkRequestInterface.sol"; +import {OracleInterface} from "../../interfaces/OracleInterface.sol"; /* solhint-disable no-empty-blocks */ contract EmptyOracle is ChainlinkRequestInterface, OracleInterface { diff --git a/contracts/src/v0.8/operatorforwarder/test/testhelpers/GasGuzzlingConsumer.sol b/contracts/src/v0.8/operatorforwarder/test/testhelpers/GasGuzzlingConsumer.sol index 029102018b0..040eeec394e 100644 --- a/contracts/src/v0.8/operatorforwarder/test/testhelpers/GasGuzzlingConsumer.sol +++ b/contracts/src/v0.8/operatorforwarder/test/testhelpers/GasGuzzlingConsumer.sol @@ -2,7 +2,7 @@ pragma solidity ^0.8.0; import {Consumer} from "./Consumer.sol"; -import {Chainlink} from "../../../Chainlink.sol"; +import {Chainlink} from 
"../../Chainlink.sol"; contract GasGuzzlingConsumer is Consumer { using Chainlink for Chainlink.Request; diff --git a/contracts/src/v0.8/operatorforwarder/test/testhelpers/MaliciousMultiWordConsumer.sol b/contracts/src/v0.8/operatorforwarder/test/testhelpers/MaliciousMultiWordConsumer.sol index 93af16f64fd..ad65927b40b 100644 --- a/contracts/src/v0.8/operatorforwarder/test/testhelpers/MaliciousMultiWordConsumer.sol +++ b/contracts/src/v0.8/operatorforwarder/test/testhelpers/MaliciousMultiWordConsumer.sol @@ -1,8 +1,8 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; -import {ChainlinkClient} from "../../../ChainlinkClient.sol"; -import {Chainlink} from "../../../Chainlink.sol"; +import {ChainlinkClient} from "../../ChainlinkClient.sol"; +import {Chainlink} from "../../Chainlink.sol"; contract MaliciousMultiWordConsumer is ChainlinkClient { uint256 private constant ORACLE_PAYMENT = 1 ether; diff --git a/contracts/src/v0.8/operatorforwarder/test/testhelpers/MaliciousRequester.sol b/contracts/src/v0.8/operatorforwarder/test/testhelpers/MaliciousRequester.sol index c01c8a60bb7..8864d8fdffb 100644 --- a/contracts/src/v0.8/operatorforwarder/test/testhelpers/MaliciousRequester.sol +++ b/contracts/src/v0.8/operatorforwarder/test/testhelpers/MaliciousRequester.sol @@ -2,7 +2,7 @@ pragma solidity ^0.8.0; import {MaliciousChainlink} from "./MaliciousChainlink.sol"; import {MaliciousChainlinked, Chainlink} from "./MaliciousChainlinked.sol"; -import {ChainlinkRequestInterface} from "../../../interfaces/ChainlinkRequestInterface.sol"; +import {ChainlinkRequestInterface} from "../../interfaces/ChainlinkRequestInterface.sol"; contract MaliciousRequester is MaliciousChainlinked { uint256 private constant ORACLE_PAYMENT = 1 ether; diff --git a/contracts/src/v0.8/operatorforwarder/test/testhelpers/MultiWordConsumer.sol b/contracts/src/v0.8/operatorforwarder/test/testhelpers/MultiWordConsumer.sol index b3fdfcb813a..50420807cf9 100644 --- a/contracts/src/v0.8/operatorforwarder/test/testhelpers/MultiWordConsumer.sol +++ b/contracts/src/v0.8/operatorforwarder/test/testhelpers/MultiWordConsumer.sol @@ -1,7 +1,7 @@ pragma solidity ^0.8.0; -import {ChainlinkClient, ChainlinkRequestInterface, LinkTokenInterface} from "../../../ChainlinkClient.sol"; -import {Chainlink} from "../../../Chainlink.sol"; +import {ChainlinkClient, ChainlinkRequestInterface, LinkTokenInterface} from "../../ChainlinkClient.sol"; +import {Chainlink} from "../../Chainlink.sol"; contract MultiWordConsumer is ChainlinkClient { using Chainlink for Chainlink.Request; diff --git a/contracts/src/v0.8/tests/MockV3Aggregator.sol b/contracts/src/v0.8/shared/mocks/MockV3Aggregator.sol similarity index 95% rename from contracts/src/v0.8/tests/MockV3Aggregator.sol rename to contracts/src/v0.8/shared/mocks/MockV3Aggregator.sol index 9822d23e853..a405b7f6bef 100644 --- a/contracts/src/v0.8/tests/MockV3Aggregator.sol +++ b/contracts/src/v0.8/shared/mocks/MockV3Aggregator.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; -import "../shared/interfaces/AggregatorV2V3Interface.sol"; +import {AggregatorV2V3Interface} from "../interfaces/AggregatorV2V3Interface.sol"; /** * @title MockV3Aggregator @@ -11,6 +11,7 @@ import "../shared/interfaces/AggregatorV2V3Interface.sol"; * aggregator contract, but how the aggregator got * its answer is unimportant */ +// solhint-disable contract MockV3Aggregator is AggregatorV2V3Interface { uint256 public constant override version = 0; diff --git a/contracts/src/v0.8/tests/LogEmitter.sol 
b/contracts/src/v0.8/shared/test/helpers/LogEmitter.sol similarity index 97% rename from contracts/src/v0.8/tests/LogEmitter.sol rename to contracts/src/v0.8/shared/test/helpers/LogEmitter.sol index 37306cc2bc5..4bf9e9e5674 100644 --- a/contracts/src/v0.8/tests/LogEmitter.sol +++ b/contracts/src/v0.8/shared/test/helpers/LogEmitter.sol @@ -1,6 +1,7 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; +// solhint-disable contract LogEmitter { event Log1(uint256); event Log2(uint256 indexed); diff --git a/contracts/src/v0.8/tests/VRFLogEmitter.sol b/contracts/src/v0.8/shared/test/helpers/VRFLogEmitter.sol similarity index 100% rename from contracts/src/v0.8/tests/VRFLogEmitter.sol rename to contracts/src/v0.8/shared/test/helpers/VRFLogEmitter.sol diff --git a/contracts/src/v0.8/ChainSpecificUtil.sol b/contracts/src/v0.8/shared/util/ChainSpecificUtil.sol similarity index 95% rename from contracts/src/v0.8/ChainSpecificUtil.sol rename to contracts/src/v0.8/shared/util/ChainSpecificUtil.sol index c5052cd9b25..d541f5f8486 100644 --- a/contracts/src/v0.8/ChainSpecificUtil.sol +++ b/contracts/src/v0.8/shared/util/ChainSpecificUtil.sol @@ -1,9 +1,9 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.9; -import {ArbSys} from "./vendor/@arbitrum/nitro-contracts/src/precompiles/ArbSys.sol"; -import {ArbGasInfo} from "./vendor/@arbitrum/nitro-contracts/src/precompiles/ArbGasInfo.sol"; -import {OVM_GasPriceOracle} from "./vendor/@eth-optimism/contracts/v0.8.9/contracts/L2/predeploys/OVM_GasPriceOracle.sol"; +import {ArbSys} from "../../vendor/@arbitrum/nitro-contracts/src/precompiles/ArbSys.sol"; +import {ArbGasInfo} from "../../vendor/@arbitrum/nitro-contracts/src/precompiles/ArbGasInfo.sol"; +import {OVM_GasPriceOracle} from "../../vendor/@eth-optimism/contracts/v0.8.9/contracts/L2/predeploys/OVM_GasPriceOracle.sol"; /// @dev A library that abstracts out opcodes that behave differently across chains. /// @dev The methods below return values that are pertinent to the given chain. 
diff --git a/contracts/src/v0.8/tests/ChainlinkClientTestHelper.sol b/contracts/src/v0.8/tests/ChainlinkClientTestHelper.sol deleted file mode 100644 index a344138a17d..00000000000 --- a/contracts/src/v0.8/tests/ChainlinkClientTestHelper.sol +++ /dev/null @@ -1,83 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -import "../ChainlinkClient.sol"; - -contract ChainlinkClientTestHelper is ChainlinkClient { - constructor(address _link, address _oracle) { - _setChainlinkToken(_link); - _setChainlinkOracle(_oracle); - } - - event Request(bytes32 id, address callbackAddress, bytes4 callbackfunctionSelector, bytes data); - event LinkAmount(uint256 amount); - - function publicNewRequest(bytes32 _id, address _address, bytes memory _fulfillmentSignature) public { - Chainlink.Request memory req = _buildChainlinkRequest(_id, _address, bytes4(keccak256(_fulfillmentSignature))); - emit Request(req.id, req.callbackAddress, req.callbackFunctionId, req.buf.buf); - } - - function publicRequest(bytes32 _id, address _address, bytes memory _fulfillmentSignature, uint256 _wei) public { - Chainlink.Request memory req = _buildChainlinkRequest(_id, _address, bytes4(keccak256(_fulfillmentSignature))); - _sendChainlinkRequest(req, _wei); - } - - function publicRequestRunTo( - address _oracle, - bytes32 _id, - address _address, - bytes memory _fulfillmentSignature, - uint256 _wei - ) public { - Chainlink.Request memory run = _buildChainlinkRequest(_id, _address, bytes4(keccak256(_fulfillmentSignature))); - _sendChainlinkRequestTo(_oracle, run, _wei); - } - - function publicRequestOracleData(bytes32 _id, bytes memory _fulfillmentSignature, uint256 _wei) public { - Chainlink.Request memory req = _buildOperatorRequest(_id, bytes4(keccak256(_fulfillmentSignature))); - _sendOperatorRequest(req, _wei); - } - - function publicRequestOracleDataFrom( - address _oracle, - bytes32 _id, - bytes memory _fulfillmentSignature, - uint256 _wei - ) public { - Chainlink.Request memory run = _buildOperatorRequest(_id, bytes4(keccak256(_fulfillmentSignature))); - _sendOperatorRequestTo(_oracle, run, _wei); - } - - function publicCancelRequest( - bytes32 _requestId, - uint256 _payment, - bytes4 _callbackFunctionId, - uint256 _expiration - ) public { - _cancelChainlinkRequest(_requestId, _payment, _callbackFunctionId, _expiration); - } - - function publicChainlinkToken() public view returns (address) { - return _chainlinkTokenAddress(); - } - - function publicFulfillChainlinkRequest(bytes32 _requestId, bytes32) public { - fulfillRequest(_requestId, bytes32(0)); - } - - function fulfillRequest(bytes32 _requestId, bytes32) public { - _validateChainlinkCallback(_requestId); - } - - function publicLINK(uint256 _amount) public { - emit LinkAmount(LINK_DIVISIBILITY * _amount); - } - - function publicOracleAddress() public view returns (address) { - return _chainlinkOracleAddress(); - } - - function publicAddExternalRequest(address _oracle, bytes32 _requestId) public { - _addChainlinkExternalRequest(_oracle, _requestId); - } -} diff --git a/contracts/src/v0.8/tests/ChainlinkTestHelper.sol b/contracts/src/v0.8/tests/ChainlinkTestHelper.sol deleted file mode 100644 index d42f30c374d..00000000000 --- a/contracts/src/v0.8/tests/ChainlinkTestHelper.sol +++ /dev/null @@ -1,57 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -import "../Chainlink.sol"; -import "../vendor/CBORChainlink.sol"; -import "../vendor/BufferChainlink.sol"; - -contract ChainlinkTestHelper { - using Chainlink for Chainlink.Request; - using 
CBORChainlink for BufferChainlink.buffer; - - Chainlink.Request private req; - - event RequestData(bytes payload); - - function closeEvent() public { - emit RequestData(req.buf.buf); - } - - function setBuffer(bytes memory data) public { - Chainlink.Request memory r2 = req; - r2._setBuffer(data); - req = r2; - } - - function add(string memory _key, string memory _value) public { - Chainlink.Request memory r2 = req; - r2._add(_key, _value); - req = r2; - } - - function addBytes(string memory _key, bytes memory _value) public { - Chainlink.Request memory r2 = req; - r2._addBytes(_key, _value); - req = r2; - } - - function addInt(string memory _key, int256 _value) public { - Chainlink.Request memory r2 = req; - r2._addInt(_key, _value); - req = r2; - } - - function addUint(string memory _key, uint256 _value) public { - Chainlink.Request memory r2 = req; - r2._addUint(_key, _value); - req = r2; - } - - // Temporarily have method receive bytes32[] memory until experimental - // string[] memory can be invoked from truffle tests. - function addStringArray(string memory _key, string[] memory _values) public { - Chainlink.Request memory r2 = req; - r2._addStringArray(_key, _values); - req = r2; - } -} diff --git a/contracts/src/v0.8/tests/Counter.sol b/contracts/src/v0.8/tests/Counter.sol deleted file mode 100644 index 1ceb7891490..00000000000 --- a/contracts/src/v0.8/tests/Counter.sol +++ /dev/null @@ -1,26 +0,0 @@ -// SPDX-License-Identifier: MIT - -pragma solidity ^0.8.0; - -contract Counter { - error AlwaysRevert(); - - uint256 public count = 0; - - function increment() public returns (uint256) { - count += 1; - return count; - } - - function reset() public { - count = 0; - } - - function alwaysRevert() public pure { - revert AlwaysRevert(); - } - - function alwaysRevertWithString() public pure { - revert("always revert"); - } -} diff --git a/contracts/src/v0.8/tests/FlagsTestHelper.sol b/contracts/src/v0.8/tests/FlagsTestHelper.sol deleted file mode 100644 index 3e35cae8911..00000000000 --- a/contracts/src/v0.8/tests/FlagsTestHelper.sol +++ /dev/null @@ -1,20 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -import "../Flags.sol"; - -contract FlagsTestHelper { - Flags public flags; - - constructor(address flagsContract) { - flags = Flags(flagsContract); - } - - function getFlag(address subject) external view returns (bool) { - return flags.getFlag(subject); - } - - function getFlags(address[] calldata subjects) external view returns (bool[] memory) { - return flags.getFlags(subjects); - } -} diff --git a/contracts/src/v0.8/tests/MockETHLINKAggregator.sol b/contracts/src/v0.8/tests/MockETHLINKAggregator.sol deleted file mode 100644 index d685aac7314..00000000000 --- a/contracts/src/v0.8/tests/MockETHLINKAggregator.sol +++ /dev/null @@ -1,44 +0,0 @@ -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -import "../shared/interfaces/AggregatorV3Interface.sol"; - -contract MockETHLINKAggregator is AggregatorV3Interface { - int256 public answer; - - constructor(int256 _answer) public { - answer = _answer; - } - - function decimals() external view override returns (uint8) { - return 18; - } - - function description() external view override returns (string memory) { - return "MockETHLINKAggregator"; - } - - function version() external view override returns (uint256) { - return 1; - } - - function getRoundData( - uint80 _roundId - ) - external - view - override - returns (uint80 roundId, int256 ans, uint256 startedAt, uint256 updatedAt, uint80 answeredInRound) - { - return (1, 
answer, block.timestamp, block.timestamp, 1); - } - - function latestRoundData() - external - view - override - returns (uint80 roundId, int256 ans, uint256 startedAt, uint256 updatedAt, uint80 answeredInRound) - { - return (1, answer, block.timestamp, block.timestamp, 1); - } -} diff --git a/contracts/src/v0.8/vrf/BatchBlockhashStore.sol b/contracts/src/v0.8/vrf/BatchBlockhashStore.sol index cf29f148a54..4ed6f28d381 100644 --- a/contracts/src/v0.8/vrf/BatchBlockhashStore.sol +++ b/contracts/src/v0.8/vrf/BatchBlockhashStore.sol @@ -2,7 +2,7 @@ // solhint-disable-next-line one-contract-per-file pragma solidity 0.8.19; -import {ChainSpecificUtil} from "../ChainSpecificUtil.sol"; +import {ChainSpecificUtil} from "../shared/util/ChainSpecificUtil.sol"; /** * @title BatchBlockhashStore diff --git a/contracts/src/v0.8/ChainSpecificUtil_v0_8_6.sol b/contracts/src/v0.8/vrf/ChainSpecificUtil_v0_8_6.sol similarity index 96% rename from contracts/src/v0.8/ChainSpecificUtil_v0_8_6.sol rename to contracts/src/v0.8/vrf/ChainSpecificUtil_v0_8_6.sol index 0379dc86ca0..eabc061e3f5 100644 --- a/contracts/src/v0.8/ChainSpecificUtil_v0_8_6.sol +++ b/contracts/src/v0.8/vrf/ChainSpecificUtil_v0_8_6.sol @@ -1,9 +1,9 @@ // SPDX-License-Identifier: MIT pragma solidity 0.8.6; -import {ArbSys} from "./vendor/@arbitrum/nitro-contracts/src/precompiles/ArbSys.sol"; -import {ArbGasInfo} from "./vendor/@arbitrum/nitro-contracts/src/precompiles/ArbGasInfo.sol"; -import {OVM_GasPriceOracle} from "./vendor/@eth-optimism/contracts/v0.8.6/contracts/L2/predeploys/OVM_GasPriceOracle.sol"; +import {ArbSys} from "../vendor/@arbitrum/nitro-contracts/src/precompiles/ArbSys.sol"; +import {ArbGasInfo} from "../vendor/@arbitrum/nitro-contracts/src/precompiles/ArbGasInfo.sol"; +import {OVM_GasPriceOracle} from "../vendor/@eth-optimism/contracts/v0.8.6/contracts/L2/predeploys/OVM_GasPriceOracle.sol"; /// @dev A library that abstracts out opcodes that behave differently across chains. /// @dev The methods below return values that are pertinent to the given chain. 
diff --git a/contracts/src/v0.8/vrf/VRFCoordinatorV2.sol b/contracts/src/v0.8/vrf/VRFCoordinatorV2.sol index 717826a3b95..ab0eecd6c45 100644 --- a/contracts/src/v0.8/vrf/VRFCoordinatorV2.sol +++ b/contracts/src/v0.8/vrf/VRFCoordinatorV2.sol @@ -5,13 +5,13 @@ import {LinkTokenInterface} from "../shared/interfaces/LinkTokenInterface.sol"; import {BlockhashStoreInterface} from "./interfaces/BlockhashStoreInterface.sol"; import {AggregatorV3Interface} from "../shared/interfaces/AggregatorV3Interface.sol"; import {VRFCoordinatorV2Interface} from "./interfaces/VRFCoordinatorV2Interface.sol"; -import {TypeAndVersionInterface} from "../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../shared/interfaces/ITypeAndVersion.sol"; import {IERC677Receiver} from "../shared/interfaces/IERC677Receiver.sol"; import {VRF} from "./VRF.sol"; import {ConfirmedOwner} from "../shared/access/ConfirmedOwner.sol"; import {VRFConsumerBaseV2} from "./VRFConsumerBaseV2.sol"; -import {ChainSpecificUtil} from "../ChainSpecificUtil_v0_8_6.sol"; -contract VRFCoordinatorV2 is VRF, ConfirmedOwner, TypeAndVersionInterface, VRFCoordinatorV2Interface, IERC677Receiver { +import {ChainSpecificUtil} from "./ChainSpecificUtil_v0_8_6.sol"; +contract VRFCoordinatorV2 is VRF, ConfirmedOwner, ITypeAndVersion, VRFCoordinatorV2Interface, IERC677Receiver { // solhint-disable-next-line chainlink-solidity/prefix-immutable-variables-with-i LinkTokenInterface public immutable LINK; // solhint-disable-next-line chainlink-solidity/prefix-immutable-variables-with-i diff --git a/contracts/src/v0.8/vrf/VRFV2Wrapper.sol b/contracts/src/v0.8/vrf/VRFV2Wrapper.sol index a656ef071f1..584136e3beb 100644 --- a/contracts/src/v0.8/vrf/VRFV2Wrapper.sol +++ b/contracts/src/v0.8/vrf/VRFV2Wrapper.sol @@ -3,20 +3,20 @@ pragma solidity ^0.8.6; import {ConfirmedOwner} from "../shared/access/ConfirmedOwner.sol"; -import {TypeAndVersionInterface} from "../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../shared/interfaces/ITypeAndVersion.sol"; import {VRFConsumerBaseV2} from "./VRFConsumerBaseV2.sol"; import {LinkTokenInterface} from "../shared/interfaces/LinkTokenInterface.sol"; import {AggregatorV3Interface} from "../shared/interfaces/AggregatorV3Interface.sol"; import {VRFCoordinatorV2Interface} from "./interfaces/VRFCoordinatorV2Interface.sol"; import {VRFV2WrapperInterface} from "./interfaces/VRFV2WrapperInterface.sol"; import {VRFV2WrapperConsumerBase} from "./VRFV2WrapperConsumerBase.sol"; -import {ChainSpecificUtil} from "../ChainSpecificUtil_v0_8_6.sol"; +import {ChainSpecificUtil} from "./ChainSpecificUtil_v0_8_6.sol"; /** * @notice A wrapper for VRFCoordinatorV2 that provides an interface better suited to one-off * @notice requests for randomness. 
*/ -contract VRFV2Wrapper is ConfirmedOwner, TypeAndVersionInterface, VRFConsumerBaseV2, VRFV2WrapperInterface { +contract VRFV2Wrapper is ConfirmedOwner, ITypeAndVersion, VRFConsumerBaseV2, VRFV2WrapperInterface { event WrapperFulfillmentFailed(uint256 indexed requestId, address indexed consumer); // solhint-disable-next-line chainlink-solidity/prefix-immutable-variables-with-i diff --git a/contracts/src/v0.8/vrf/dev/BlockhashStore.sol b/contracts/src/v0.8/vrf/dev/BlockhashStore.sol index 0bef7aeada5..8889060922b 100644 --- a/contracts/src/v0.8/vrf/dev/BlockhashStore.sol +++ b/contracts/src/v0.8/vrf/dev/BlockhashStore.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: MIT pragma solidity 0.8.19; -import {ChainSpecificUtil} from "../../ChainSpecificUtil.sol"; +import {ChainSpecificUtil} from "../../shared/util/ChainSpecificUtil.sol"; /** * @title BlockhashStore diff --git a/contracts/src/v0.8/vrf/dev/TrustedBlockhashStore.sol b/contracts/src/v0.8/vrf/dev/TrustedBlockhashStore.sol index b3b77c8095d..b6a770168e5 100644 --- a/contracts/src/v0.8/vrf/dev/TrustedBlockhashStore.sol +++ b/contracts/src/v0.8/vrf/dev/TrustedBlockhashStore.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: MIT pragma solidity 0.8.19; -import {ChainSpecificUtil} from "../../ChainSpecificUtil.sol"; +import {ChainSpecificUtil} from "../../shared/util/ChainSpecificUtil.sol"; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; import {BlockhashStore} from "./BlockhashStore.sol"; diff --git a/contracts/src/v0.8/vrf/dev/VRFV2PlusWrapper.sol b/contracts/src/v0.8/vrf/dev/VRFV2PlusWrapper.sol index 40fd8a90612..fced5822642 100644 --- a/contracts/src/v0.8/vrf/dev/VRFV2PlusWrapper.sol +++ b/contracts/src/v0.8/vrf/dev/VRFV2PlusWrapper.sol @@ -2,7 +2,7 @@ pragma solidity 0.8.19; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; -import {TypeAndVersionInterface} from "../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; import {VRFConsumerBaseV2Plus} from "./VRFConsumerBaseV2Plus.sol"; import {LinkTokenInterface} from "../../shared/interfaces/LinkTokenInterface.sol"; import {AggregatorV3Interface} from "../../shared/interfaces/AggregatorV3Interface.sol"; @@ -15,7 +15,7 @@ import {VRFV2PlusWrapperConsumerBase} from "./VRFV2PlusWrapperConsumerBase.sol"; * @notice requests for randomness. */ // solhint-disable-next-line max-states-count -contract VRFV2PlusWrapper is ConfirmedOwner, TypeAndVersionInterface, VRFConsumerBaseV2Plus, IVRFV2PlusWrapper { +contract VRFV2PlusWrapper is ConfirmedOwner, ITypeAndVersion, VRFConsumerBaseV2Plus, IVRFV2PlusWrapper { event WrapperFulfillmentFailed(uint256 indexed requestId, address indexed consumer); // upper bound limit for premium percentages to make sure fee calculations don't overflow diff --git a/contracts/src/v0.8/vrf/dev/testhelpers/VRFCoordinatorTestV2_5.sol b/contracts/src/v0.8/vrf/dev/testhelpers/VRFCoordinatorTestV2_5.sol index 2e9c4a2da75..62dfddbee8d 100644 --- a/contracts/src/v0.8/vrf/dev/testhelpers/VRFCoordinatorTestV2_5.sol +++ b/contracts/src/v0.8/vrf/dev/testhelpers/VRFCoordinatorTestV2_5.sol @@ -5,7 +5,7 @@ import {BlockhashStoreInterface} from "../../interfaces/BlockhashStoreInterface. 
import {VRFOld} from "./VRFOld.sol"; import {VRFTypes} from "../../VRFTypes.sol"; import {VRFConsumerBaseV2Plus, IVRFMigratableConsumerV2Plus} from "../VRFConsumerBaseV2Plus.sol"; -import {ChainSpecificUtil} from "../../../ChainSpecificUtil.sol"; +import {ChainSpecificUtil} from "../../../shared/util/ChainSpecificUtil.sol"; import {SubscriptionAPI} from "../SubscriptionAPI.sol"; import {VRFV2PlusClient} from "../libraries/VRFV2PlusClient.sol"; import {IVRFCoordinatorV2PlusMigration} from "../interfaces/IVRFCoordinatorV2PlusMigration.sol"; diff --git a/contracts/src/v0.8/vrf/dev/testhelpers/VRFCoordinatorV2PlusUpgradedVersion.sol b/contracts/src/v0.8/vrf/dev/testhelpers/VRFCoordinatorV2PlusUpgradedVersion.sol index af5c56bde6c..c16a498fcb7 100644 --- a/contracts/src/v0.8/vrf/dev/testhelpers/VRFCoordinatorV2PlusUpgradedVersion.sol +++ b/contracts/src/v0.8/vrf/dev/testhelpers/VRFCoordinatorV2PlusUpgradedVersion.sol @@ -7,7 +7,7 @@ import {IVRFCoordinatorV2Plus, IVRFSubscriptionV2Plus} from "../interfaces/IVRFC import {VRF} from "../../../vrf/VRF.sol"; import {VRFTypes} from "../../VRFTypes.sol"; import {VRFConsumerBaseV2Plus, IVRFMigratableConsumerV2Plus} from "../VRFConsumerBaseV2Plus.sol"; -import {ChainSpecificUtil} from "../../../ChainSpecificUtil.sol"; +import {ChainSpecificUtil} from "../../../shared/util/ChainSpecificUtil.sol"; import {SubscriptionAPI} from "../SubscriptionAPI.sol"; import {VRFV2PlusClient} from "../libraries/VRFV2PlusClient.sol"; import {IVRFCoordinatorV2PlusMigration} from "../interfaces/IVRFCoordinatorV2PlusMigration.sol"; diff --git a/contracts/src/v0.8/vrf/dev/testhelpers/VRFV2PlusLoadTestWithMetrics.sol b/contracts/src/v0.8/vrf/dev/testhelpers/VRFV2PlusLoadTestWithMetrics.sol index 87e70f60e35..f70c0331cd1 100644 --- a/contracts/src/v0.8/vrf/dev/testhelpers/VRFV2PlusLoadTestWithMetrics.sol +++ b/contracts/src/v0.8/vrf/dev/testhelpers/VRFV2PlusLoadTestWithMetrics.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; -import {ChainSpecificUtil} from "../../../ChainSpecificUtil.sol"; +import {ChainSpecificUtil} from "../../../shared/util/ChainSpecificUtil.sol"; import {VRFConsumerBaseV2Plus} from "../VRFConsumerBaseV2Plus.sol"; import {VRFV2PlusClient} from "../libraries/VRFV2PlusClient.sol"; diff --git a/contracts/src/v0.8/vrf/dev/testhelpers/VRFV2PlusWrapperLoadTestConsumer.sol b/contracts/src/v0.8/vrf/dev/testhelpers/VRFV2PlusWrapperLoadTestConsumer.sol index 6935723d931..ae76fed365a 100644 --- a/contracts/src/v0.8/vrf/dev/testhelpers/VRFV2PlusWrapperLoadTestConsumer.sol +++ b/contracts/src/v0.8/vrf/dev/testhelpers/VRFV2PlusWrapperLoadTestConsumer.sol @@ -3,7 +3,7 @@ pragma solidity ^0.8.6; import {VRFV2PlusWrapperConsumerBase} from "../VRFV2PlusWrapperConsumerBase.sol"; import {ConfirmedOwner} from "../../../shared/access/ConfirmedOwner.sol"; -import {ChainSpecificUtil} from "../../../ChainSpecificUtil.sol"; +import {ChainSpecificUtil} from "../../../shared/util/ChainSpecificUtil.sol"; import {VRFV2PlusClient} from "../libraries/VRFV2PlusClient.sol"; contract VRFV2PlusWrapperLoadTestConsumer is VRFV2PlusWrapperConsumerBase, ConfirmedOwner { diff --git a/contracts/src/v0.8/vrf/test/ChainSpecificUtil.t.sol b/contracts/src/v0.8/vrf/test/ChainSpecificUtil.t.sol index efeb9027462..3e81dd2d3c9 100644 --- a/contracts/src/v0.8/vrf/test/ChainSpecificUtil.t.sol +++ b/contracts/src/v0.8/vrf/test/ChainSpecificUtil.t.sol @@ -1,7 +1,7 @@ pragma solidity 0.8.6; import "./BaseTest.t.sol"; -import {ChainSpecificUtil} from 
"../../ChainSpecificUtil_v0_8_6.sol"; +import {ChainSpecificUtil} from "../ChainSpecificUtil_v0_8_6.sol"; import {ArbSys} from "../../vendor/@arbitrum/nitro-contracts/src/precompiles/ArbSys.sol"; import {ArbGasInfo} from "../../vendor/@arbitrum/nitro-contracts/src/precompiles/ArbGasInfo.sol"; diff --git a/contracts/src/v0.8/vrf/test/FixtureVRFCoordinatorV2_5.t.sol b/contracts/src/v0.8/vrf/test/FixtureVRFCoordinatorV2_5.t.sol index c1c2c7eb27c..3574143f6c5 100644 --- a/contracts/src/v0.8/vrf/test/FixtureVRFCoordinatorV2_5.t.sol +++ b/contracts/src/v0.8/vrf/test/FixtureVRFCoordinatorV2_5.t.sol @@ -8,8 +8,8 @@ import {BlockhashStore} from "../dev/BlockhashStore.sol"; import {VRFV2PlusClient} from "../dev/libraries/VRFV2PlusClient.sol"; import {ExposedVRFCoordinatorV2_5} from "../dev/testhelpers/ExposedVRFCoordinatorV2_5.sol"; import {VRFV2PlusConsumerExample} from "../dev/testhelpers/VRFV2PlusConsumerExample.sol"; -import {MockLinkToken} from "../../mocks/MockLinkToken.sol"; -import {MockV3Aggregator} from "../../tests/MockV3Aggregator.sol"; +import {MockLinkToken} from "../../functions/tests/v1_X/testhelpers/MockLinkToken.sol"; +import {MockV3Aggregator} from "../../shared/mocks/MockV3Aggregator.sol"; import "./BaseTest.t.sol"; contract FixtureVRFCoordinatorV2_5 is BaseTest, VRF { diff --git a/contracts/src/v0.8/vrf/test/VRFCoordinatorV2Mock.t.sol b/contracts/src/v0.8/vrf/test/VRFCoordinatorV2Mock.t.sol index 1716118b765..c0c0a2a2f52 100644 --- a/contracts/src/v0.8/vrf/test/VRFCoordinatorV2Mock.t.sol +++ b/contracts/src/v0.8/vrf/test/VRFCoordinatorV2Mock.t.sol @@ -2,8 +2,8 @@ pragma solidity 0.8.6; import "./BaseTest.t.sol"; import {VRF} from "../VRF.sol"; -import {MockLinkToken} from "../../mocks/MockLinkToken.sol"; -import {MockV3Aggregator} from "../../tests/MockV3Aggregator.sol"; +import {MockLinkToken} from "../../functions/tests/v1_X/testhelpers/MockLinkToken.sol"; +import {MockV3Aggregator} from "../../shared/mocks/MockV3Aggregator.sol"; import {VRFCoordinatorV2Mock} from "../mocks/VRFCoordinatorV2Mock.sol"; import {VRFConsumerV2} from "../testhelpers/VRFConsumerV2.sol"; diff --git a/contracts/src/v0.8/vrf/test/VRFCoordinatorV2Plus_Migration.t.sol b/contracts/src/v0.8/vrf/test/VRFCoordinatorV2Plus_Migration.t.sol index ad239592d41..2d12f5ec82e 100644 --- a/contracts/src/v0.8/vrf/test/VRFCoordinatorV2Plus_Migration.t.sol +++ b/contracts/src/v0.8/vrf/test/VRFCoordinatorV2Plus_Migration.t.sol @@ -6,8 +6,8 @@ import {ExposedVRFCoordinatorV2_5} from "../dev/testhelpers/ExposedVRFCoordinato import {VRFCoordinatorV2_5} from "../dev/VRFCoordinatorV2_5.sol"; import {SubscriptionAPI} from "../dev/SubscriptionAPI.sol"; import {VRFV2PlusConsumerExample} from "../dev/testhelpers/VRFV2PlusConsumerExample.sol"; -import {MockLinkToken} from "../../mocks/MockLinkToken.sol"; -import {MockV3Aggregator} from "../../tests/MockV3Aggregator.sol"; +import {MockLinkToken} from "../../functions/tests/v1_X/testhelpers/MockLinkToken.sol"; +import {MockV3Aggregator} from "../../shared/mocks/MockV3Aggregator.sol"; import {VRFV2PlusMaliciousMigrator} from "../dev/testhelpers/VRFV2PlusMaliciousMigrator.sol"; contract VRFCoordinatorV2Plus_Migration is BaseTest { diff --git a/contracts/src/v0.8/vrf/test/VRFCoordinatorV2_5Mock.t.sol b/contracts/src/v0.8/vrf/test/VRFCoordinatorV2_5Mock.t.sol index 75c763c88cb..d379ab9679d 100644 --- a/contracts/src/v0.8/vrf/test/VRFCoordinatorV2_5Mock.t.sol +++ b/contracts/src/v0.8/vrf/test/VRFCoordinatorV2_5Mock.t.sol @@ -5,7 +5,7 @@ import {VRFV2PlusClient} from 
"../dev/libraries/VRFV2PlusClient.sol"; import {SubscriptionAPI} from "../dev/SubscriptionAPI.sol"; import {VRFCoordinatorV2_5Mock} from "../mocks/VRFCoordinatorV2_5Mock.sol"; import {VRFConsumerV2Plus} from "../testhelpers/VRFConsumerV2Plus.sol"; -import {MockLinkToken} from "../../mocks/MockLinkToken.sol"; +import {MockLinkToken} from "../../functions/tests/v1_X/testhelpers/MockLinkToken.sol"; contract VRFCoordinatorV2_5MockTest is BaseTest { MockLinkToken internal s_linkToken; diff --git a/contracts/src/v0.8/vrf/test/VRFCoordinatorV2_5_Arbitrum.t.sol b/contracts/src/v0.8/vrf/test/VRFCoordinatorV2_5_Arbitrum.t.sol index 8e47b800ee5..a6c2c88d016 100644 --- a/contracts/src/v0.8/vrf/test/VRFCoordinatorV2_5_Arbitrum.t.sol +++ b/contracts/src/v0.8/vrf/test/VRFCoordinatorV2_5_Arbitrum.t.sol @@ -1,8 +1,8 @@ pragma solidity 0.8.19; import "./BaseTest.t.sol"; -import {MockLinkToken} from "../../mocks/MockLinkToken.sol"; -import {MockV3Aggregator} from "../../tests/MockV3Aggregator.sol"; +import {MockLinkToken} from "../../functions/tests/v1_X/testhelpers/MockLinkToken.sol"; +import {MockV3Aggregator} from "../../shared/mocks/MockV3Aggregator.sol"; import {ExposedVRFCoordinatorV2_5_Arbitrum} from "../dev/testhelpers/ExposedVRFCoordinatorV2_5_Arbitrum.sol"; import {BlockhashStore} from "../dev/BlockhashStore.sol"; import {ArbGasInfo} from "../../vendor/@arbitrum/nitro-contracts/src/precompiles/ArbGasInfo.sol"; diff --git a/contracts/src/v0.8/vrf/test/VRFCoordinatorV2_5_Optimism.t.sol b/contracts/src/v0.8/vrf/test/VRFCoordinatorV2_5_Optimism.t.sol index b54dbbaaa04..0ebec3b1c56 100644 --- a/contracts/src/v0.8/vrf/test/VRFCoordinatorV2_5_Optimism.t.sol +++ b/contracts/src/v0.8/vrf/test/VRFCoordinatorV2_5_Optimism.t.sol @@ -1,8 +1,8 @@ pragma solidity 0.8.19; import "./BaseTest.t.sol"; -import {MockLinkToken} from "../../mocks/MockLinkToken.sol"; -import {MockV3Aggregator} from "../../tests/MockV3Aggregator.sol"; +import {MockLinkToken} from "../../functions/tests/v1_X/testhelpers/MockLinkToken.sol"; +import {MockV3Aggregator} from "../../shared/mocks/MockV3Aggregator.sol"; import {ExposedVRFCoordinatorV2_5_Optimism} from "../dev/testhelpers/ExposedVRFCoordinatorV2_5_Optimism.sol"; import {OptimismL1Fees} from "../dev/OptimismL1Fees.sol"; import {BlockhashStore} from "../dev/BlockhashStore.sol"; diff --git a/contracts/src/v0.8/vrf/test/VRFV2Plus.t.sol b/contracts/src/v0.8/vrf/test/VRFV2Plus.t.sol index dd3f54b580a..5d8366b5c7f 100644 --- a/contracts/src/v0.8/vrf/test/VRFV2Plus.t.sol +++ b/contracts/src/v0.8/vrf/test/VRFV2Plus.t.sol @@ -2,8 +2,8 @@ pragma solidity 0.8.19; import "./BaseTest.t.sol"; import {VRF} from "../VRF.sol"; -import {MockLinkToken} from "../../mocks/MockLinkToken.sol"; -import {MockV3Aggregator} from "../../tests/MockV3Aggregator.sol"; +import {MockLinkToken} from "../../functions/tests/v1_X/testhelpers/MockLinkToken.sol"; +import {MockV3Aggregator} from "../../shared/mocks/MockV3Aggregator.sol"; import {ExposedVRFCoordinatorV2_5} from "../dev/testhelpers/ExposedVRFCoordinatorV2_5.sol"; import {VRFCoordinatorV2_5} from "../dev/VRFCoordinatorV2_5.sol"; import {SubscriptionAPI} from "../dev/SubscriptionAPI.sol"; diff --git a/contracts/src/v0.8/vrf/test/VRFV2PlusSubscriptionAPI.t.sol b/contracts/src/v0.8/vrf/test/VRFV2PlusSubscriptionAPI.t.sol index 4fbb44ea717..4e89c0ec5f7 100644 --- a/contracts/src/v0.8/vrf/test/VRFV2PlusSubscriptionAPI.t.sol +++ b/contracts/src/v0.8/vrf/test/VRFV2PlusSubscriptionAPI.t.sol @@ -4,8 +4,8 @@ import "./BaseTest.t.sol"; import 
{ExposedVRFCoordinatorV2_5} from "../dev/testhelpers/ExposedVRFCoordinatorV2_5.sol"; import {VRFV2PlusLoadTestWithMetrics} from "../dev/testhelpers/VRFV2PlusLoadTestWithMetrics.sol"; import {SubscriptionAPI} from "../dev/SubscriptionAPI.sol"; -import {MockLinkToken} from "../../mocks/MockLinkToken.sol"; -import {MockV3Aggregator} from "../../tests/MockV3Aggregator.sol"; +import {MockLinkToken} from "../../functions/tests/v1_X/testhelpers/MockLinkToken.sol"; +import {MockV3Aggregator} from "../../shared/mocks/MockV3Aggregator.sol"; import "@openzeppelin/contracts/utils/Strings.sol"; // for Strings.toString import {VmSafe} from "forge-std/Vm.sol"; diff --git a/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper.t.sol b/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper.t.sol index 4b3a893fe1f..45e2131ce7a 100644 --- a/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper.t.sol +++ b/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper.t.sol @@ -2,8 +2,8 @@ pragma solidity 0.8.19; import {BaseTest} from "./BaseTest.t.sol"; -import {MockLinkToken} from "../../mocks/MockLinkToken.sol"; -import {MockV3Aggregator} from "../../tests/MockV3Aggregator.sol"; +import {MockLinkToken} from "../../functions/tests/v1_X/testhelpers/MockLinkToken.sol"; +import {MockV3Aggregator} from "../../shared/mocks/MockV3Aggregator.sol"; import {ExposedVRFCoordinatorV2_5} from "../dev/testhelpers/ExposedVRFCoordinatorV2_5.sol"; import {SubscriptionAPI} from "../dev/SubscriptionAPI.sol"; import {VRFV2PlusWrapperConsumerExample} from "../dev/testhelpers/VRFV2PlusWrapperConsumerExample.sol"; diff --git a/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper_Arbitrum.t.sol b/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper_Arbitrum.t.sol index 96f14847c41..f88dd15f2d5 100644 --- a/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper_Arbitrum.t.sol +++ b/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper_Arbitrum.t.sol @@ -2,8 +2,8 @@ pragma solidity 0.8.19; import {BaseTest} from "./BaseTest.t.sol"; -import {MockLinkToken} from "../../mocks/MockLinkToken.sol"; -import {MockV3Aggregator} from "../../tests/MockV3Aggregator.sol"; +import {MockLinkToken} from "../../functions/tests/v1_X/testhelpers/MockLinkToken.sol"; +import {MockV3Aggregator} from "../../shared/mocks/MockV3Aggregator.sol"; import {ExposedVRFCoordinatorV2_5_Arbitrum} from "../dev/testhelpers/ExposedVRFCoordinatorV2_5_Arbitrum.sol"; import {VRFV2PlusWrapper_Arbitrum} from "../dev/VRFV2PlusWrapper_Arbitrum.sol"; import {ArbGasInfo} from "../../vendor/@arbitrum/nitro-contracts/src/precompiles/ArbGasInfo.sol"; diff --git a/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper_Migration.t.sol b/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper_Migration.t.sol index ba77686088e..26cc5a213ec 100644 --- a/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper_Migration.t.sol +++ b/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper_Migration.t.sol @@ -2,8 +2,8 @@ pragma solidity 0.8.19; import {BaseTest} from "./BaseTest.t.sol"; -import {MockLinkToken} from "../../mocks/MockLinkToken.sol"; -import {MockV3Aggregator} from "../../tests/MockV3Aggregator.sol"; +import {MockLinkToken} from "../../functions/tests/v1_X/testhelpers/MockLinkToken.sol"; +import {MockV3Aggregator} from "../../shared/mocks/MockV3Aggregator.sol"; import {ExposedVRFCoordinatorV2_5} from "../dev/testhelpers/ExposedVRFCoordinatorV2_5.sol"; import {VRFCoordinatorV2Plus_V2Example} from "../dev/testhelpers/VRFCoordinatorV2Plus_V2Example.sol"; import {VRFV2PlusWrapperConsumerExample} from "../dev/testhelpers/VRFV2PlusWrapperConsumerExample.sol"; diff --git 
a/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper_Optimism.t.sol b/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper_Optimism.t.sol index a8a97a57f0e..de56a9a7e2b 100644 --- a/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper_Optimism.t.sol +++ b/contracts/src/v0.8/vrf/test/VRFV2PlusWrapper_Optimism.t.sol @@ -2,8 +2,8 @@ pragma solidity 0.8.19; import {BaseTest} from "./BaseTest.t.sol"; -import {MockLinkToken} from "../../mocks/MockLinkToken.sol"; -import {MockV3Aggregator} from "../../tests/MockV3Aggregator.sol"; +import {MockLinkToken} from "../../functions/tests/v1_X/testhelpers/MockLinkToken.sol"; +import {MockV3Aggregator} from "../../shared/mocks/MockV3Aggregator.sol"; import {ExposedVRFCoordinatorV2_5_Optimism} from "../dev/testhelpers/ExposedVRFCoordinatorV2_5_Optimism.sol"; import {VRFV2PlusWrapper_Optimism} from "../dev/VRFV2PlusWrapper_Optimism.sol"; import {OptimismL1Fees} from "../dev/OptimismL1Fees.sol"; diff --git a/contracts/src/v0.8/vrf/testhelpers/ChainSpecificUtilHelper.sol b/contracts/src/v0.8/vrf/testhelpers/ChainSpecificUtilHelper.sol index 16a157e3547..96a088a652e 100644 --- a/contracts/src/v0.8/vrf/testhelpers/ChainSpecificUtilHelper.sol +++ b/contracts/src/v0.8/vrf/testhelpers/ChainSpecificUtilHelper.sol @@ -1,7 +1,7 @@ // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; -import {ChainSpecificUtil} from "../../ChainSpecificUtil_v0_8_6.sol"; +import {ChainSpecificUtil} from "../ChainSpecificUtil_v0_8_6.sol"; /// @dev A helper contract that exposes ChainSpecificUtil methods for testing contract ChainSpecificUtilHelper { diff --git a/contracts/src/v0.8/vrf/testhelpers/VRFCoordinatorTestV2.sol b/contracts/src/v0.8/vrf/testhelpers/VRFCoordinatorTestV2.sol index 5774b770750..5c42a4070dc 100644 --- a/contracts/src/v0.8/vrf/testhelpers/VRFCoordinatorTestV2.sol +++ b/contracts/src/v0.8/vrf/testhelpers/VRFCoordinatorTestV2.sol @@ -5,19 +5,13 @@ import {LinkTokenInterface} from "../../shared/interfaces/LinkTokenInterface.sol import {BlockhashStoreInterface} from "../interfaces/BlockhashStoreInterface.sol"; import {AggregatorV3Interface} from "../../shared/interfaces/AggregatorV3Interface.sol"; import {VRFCoordinatorV2Interface} from "../interfaces/VRFCoordinatorV2Interface.sol"; -import {TypeAndVersionInterface} from "../../interfaces/TypeAndVersionInterface.sol"; +import {ITypeAndVersion} from "../../shared/interfaces/ITypeAndVersion.sol"; import {IERC677Receiver} from "../../shared/interfaces/IERC677Receiver.sol"; import {VRF} from "../VRF.sol"; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; import {VRFConsumerBaseV2} from "../VRFConsumerBaseV2.sol"; -contract VRFCoordinatorTestV2 is - VRF, - ConfirmedOwner, - TypeAndVersionInterface, - VRFCoordinatorV2Interface, - IERC677Receiver -{ +contract VRFCoordinatorTestV2 is VRF, ConfirmedOwner, ITypeAndVersion, VRFCoordinatorV2Interface, IERC677Receiver { LinkTokenInterface public immutable LINK; AggregatorV3Interface public immutable LINK_ETH_FEED; BlockhashStoreInterface public immutable BLOCKHASH_STORE; diff --git a/contracts/src/v0.8/vrf/testhelpers/VRFV2LoadTestWithMetrics.sol b/contracts/src/v0.8/vrf/testhelpers/VRFV2LoadTestWithMetrics.sol index b4d0104acee..3e9e7bfc47a 100644 --- a/contracts/src/v0.8/vrf/testhelpers/VRFV2LoadTestWithMetrics.sol +++ b/contracts/src/v0.8/vrf/testhelpers/VRFV2LoadTestWithMetrics.sol @@ -3,7 +3,7 @@ pragma solidity ^0.8.0; import {VRFCoordinatorV2Interface} from "../interfaces/VRFCoordinatorV2Interface.sol"; import {VRFConsumerBaseV2} from "../VRFConsumerBaseV2.sol"; -import 
{ChainSpecificUtil} from "../../ChainSpecificUtil_v0_8_6.sol"; +import {ChainSpecificUtil} from "../ChainSpecificUtil_v0_8_6.sol"; import {LinkTokenInterface} from "../../shared/interfaces/LinkTokenInterface.sol"; /** diff --git a/contracts/src/v0.8/vrf/testhelpers/VRFV2OwnerTestConsumer.sol b/contracts/src/v0.8/vrf/testhelpers/VRFV2OwnerTestConsumer.sol index 8f1b275397c..c0c1c659fe1 100644 --- a/contracts/src/v0.8/vrf/testhelpers/VRFV2OwnerTestConsumer.sol +++ b/contracts/src/v0.8/vrf/testhelpers/VRFV2OwnerTestConsumer.sol @@ -4,7 +4,7 @@ pragma solidity ^0.8.0; import {VRFCoordinatorV2Interface} from "../interfaces/VRFCoordinatorV2Interface.sol"; import {VRFConsumerBaseV2} from "../VRFConsumerBaseV2.sol"; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; -import {ChainSpecificUtil} from "../../ChainSpecificUtil_v0_8_6.sol"; +import {ChainSpecificUtil} from "../ChainSpecificUtil_v0_8_6.sol"; import {LinkTokenInterface} from "../../shared/interfaces/LinkTokenInterface.sol"; contract VRFV2OwnerTestConsumer is VRFConsumerBaseV2, ConfirmedOwner { diff --git a/contracts/src/v0.8/vrf/testhelpers/VRFV2WrapperLoadTestConsumer.sol b/contracts/src/v0.8/vrf/testhelpers/VRFV2WrapperLoadTestConsumer.sol index 3da8f17469a..9501a74b220 100644 --- a/contracts/src/v0.8/vrf/testhelpers/VRFV2WrapperLoadTestConsumer.sol +++ b/contracts/src/v0.8/vrf/testhelpers/VRFV2WrapperLoadTestConsumer.sol @@ -3,7 +3,7 @@ pragma solidity ^0.8.6; import {VRFV2WrapperConsumerBase} from "../VRFV2WrapperConsumerBase.sol"; import {ConfirmedOwner} from "../../shared/access/ConfirmedOwner.sol"; -import {ChainSpecificUtil} from "../../ChainSpecificUtil_v0_8_6.sol"; +import {ChainSpecificUtil} from "../ChainSpecificUtil_v0_8_6.sol"; import {VRFV2WrapperInterface} from "../interfaces/VRFV2WrapperInterface.sol"; contract VRFV2WrapperLoadTestConsumer is VRFV2WrapperConsumerBase, ConfirmedOwner { diff --git a/contracts/test/v0.8/Chainlink.test.ts b/contracts/test/v0.8/Chainlink.test.ts deleted file mode 100644 index 30063ca1024..00000000000 --- a/contracts/test/v0.8/Chainlink.test.ts +++ /dev/null @@ -1,182 +0,0 @@ -import { ethers } from 'hardhat' -import { publicAbi, decodeDietCBOR, hexToBuf } from '../test-helpers/helpers' -import { assert } from 'chai' -import { Contract, ContractFactory, providers, Signer } from 'ethers' -import { Roles, getUsers } from '../test-helpers/setup' -import { makeDebug } from '../test-helpers/debug' - -const debug = makeDebug('ChainlinkTestHelper') -let concreteChainlinkFactory: ContractFactory - -let roles: Roles - -before(async () => { - roles = (await getUsers()).roles - concreteChainlinkFactory = await ethers.getContractFactory( - 'src/v0.8/tests/ChainlinkTestHelper.sol:ChainlinkTestHelper', - roles.defaultAccount, - ) -}) - -describe('ChainlinkTestHelper', () => { - let ccl: Contract - let defaultAccount: Signer - - beforeEach(async () => { - defaultAccount = roles.defaultAccount - ccl = await concreteChainlinkFactory.connect(defaultAccount).deploy() - }) - - it('has a limited public interface [ @skip-coverage ]', () => { - publicAbi(ccl, [ - 'add', - 'addBytes', - 'addInt', - 'addStringArray', - 'addUint', - 'closeEvent', - 'setBuffer', - ]) - }) - - async function parseCCLEvent(tx: providers.TransactionResponse) { - const receipt = await tx.wait() - const data = receipt.logs?.[0].data - const d = debug.extend('parseCCLEvent') - d('data %s', data) - return ethers.utils.defaultAbiCoder.decode(['bytes'], data ?? 
'') - } - - describe('#close', () => { - it('handles empty payloads', async () => { - const tx = await ccl.closeEvent() - const [payload] = await parseCCLEvent(tx) - const decoded = await decodeDietCBOR(payload) - assert.deepEqual(decoded, {}) - }) - }) - - describe('#setBuffer', () => { - it('emits the buffer', async () => { - await ccl.setBuffer('0xA161616162') - const tx = await ccl.closeEvent() - const [payload] = await parseCCLEvent(tx) - const decoded = await decodeDietCBOR(payload) - assert.deepEqual(decoded, { a: 'b' }) - }) - }) - - describe('#add', () => { - it('stores and logs keys and values', async () => { - await ccl.add('first', 'word!!') - const tx = await ccl.closeEvent() - const [payload] = await parseCCLEvent(tx) - const decoded = await decodeDietCBOR(payload) - assert.deepEqual(decoded, { first: 'word!!' }) - }) - - it('handles two entries', async () => { - await ccl.add('first', 'uno') - await ccl.add('second', 'dos') - const tx = await ccl.closeEvent() - const [payload] = await parseCCLEvent(tx) - const decoded = await decodeDietCBOR(payload) - - assert.deepEqual(decoded, { - first: 'uno', - second: 'dos', - }) - }) - }) - - describe('#addBytes', () => { - it('stores and logs keys and values', async () => { - await ccl.addBytes('first', '0xaabbccddeeff') - const tx = await ccl.closeEvent() - const [payload] = await parseCCLEvent(tx) - const decoded = await decodeDietCBOR(payload) - const expected = hexToBuf('0xaabbccddeeff') - assert.deepEqual(decoded, { first: expected }) - }) - - it('handles two entries', async () => { - await ccl.addBytes('first', '0x756E6F') - await ccl.addBytes('second', '0x646F73') - const tx = await ccl.closeEvent() - const [payload] = await parseCCLEvent(tx) - const decoded = await decodeDietCBOR(payload) - - const expectedFirst = hexToBuf('0x756E6F') - const expectedSecond = hexToBuf('0x646F73') - assert.deepEqual(decoded, { - first: expectedFirst, - second: expectedSecond, - }) - }) - - it('handles strings', async () => { - await ccl.addBytes('first', ethers.utils.toUtf8Bytes('apple')) - const tx = await ccl.closeEvent() - const [payload] = await parseCCLEvent(tx) - const decoded = await decodeDietCBOR(payload) - const expected = ethers.utils.toUtf8Bytes('apple') - assert.deepEqual(decoded, { first: expected }) - }) - }) - - describe('#addInt', () => { - it('stores and logs keys and values', async () => { - await ccl.addInt('first', 1) - const tx = await ccl.closeEvent() - const [payload] = await parseCCLEvent(tx) - const decoded = await decodeDietCBOR(payload) - assert.deepEqual(decoded, { first: 1 }) - }) - - it('handles two entries', async () => { - await ccl.addInt('first', 1) - await ccl.addInt('second', 2) - const tx = await ccl.closeEvent() - const [payload] = await parseCCLEvent(tx) - const decoded = await decodeDietCBOR(payload) - - assert.deepEqual(decoded, { - first: 1, - second: 2, - }) - }) - }) - - describe('#addUint', () => { - it('stores and logs keys and values', async () => { - await ccl.addUint('first', 1) - const tx = await ccl.closeEvent() - const [payload] = await parseCCLEvent(tx) - const decoded = await decodeDietCBOR(payload) - assert.deepEqual(decoded, { first: 1 }) - }) - - it('handles two entries', async () => { - await ccl.addUint('first', 1) - await ccl.addUint('second', 2) - const tx = await ccl.closeEvent() - const [payload] = await parseCCLEvent(tx) - const decoded = await decodeDietCBOR(payload) - - assert.deepEqual(decoded, { - first: 1, - second: 2, - }) - }) - }) - - describe('#addStringArray', () => { - 
it('stores and logs keys and values', async () => { - await ccl.addStringArray('word', ['seinfeld', '"4"', 'LIFE']) - const tx = await ccl.closeEvent() - const [payload] = await parseCCLEvent(tx) - const decoded = await decodeDietCBOR(payload) - assert.deepEqual(decoded, { word: ['seinfeld', '"4"', 'LIFE'] }) - }) - }) -}) diff --git a/contracts/test/v0.8/ChainlinkClient.test.ts b/contracts/test/v0.8/ChainlinkClient.test.ts deleted file mode 100644 index c5691211c1a..00000000000 --- a/contracts/test/v0.8/ChainlinkClient.test.ts +++ /dev/null @@ -1,452 +0,0 @@ -import { ethers } from 'hardhat' -import { assert } from 'chai' -import { Contract, ContractFactory } from 'ethers' -import { getUsers, Roles } from '../test-helpers/setup' -import { - convertFufillParams, - decodeCCRequest, - decodeRunRequest, - RunRequest, -} from '../test-helpers/oracle' -import { decodeDietCBOR } from '../test-helpers/helpers' -import { evmRevert } from '../test-helpers/matchers' - -let concreteChainlinkClientFactory: ContractFactory -let emptyOracleFactory: ContractFactory -let getterSetterFactory: ContractFactory -let operatorFactory: ContractFactory -let linkTokenFactory: ContractFactory - -let roles: Roles - -before(async () => { - roles = (await getUsers()).roles - - concreteChainlinkClientFactory = await ethers.getContractFactory( - 'src/v0.8/tests/ChainlinkClientTestHelper.sol:ChainlinkClientTestHelper', - roles.defaultAccount, - ) - emptyOracleFactory = await ethers.getContractFactory( - 'src/v0.8/operatorforwarder/test/testhelpers/EmptyOracle.sol:EmptyOracle', - roles.defaultAccount, - ) - getterSetterFactory = await ethers.getContractFactory( - 'src/v0.8/operatorforwarder/test/testhelpers/GetterSetter.sol:GetterSetter', - roles.defaultAccount, - ) - operatorFactory = await ethers.getContractFactory( - 'src/v0.8/operatorforwarder/Operator.sol:Operator', - roles.defaultAccount, - ) - linkTokenFactory = await ethers.getContractFactory( - 'src/v0.8/shared/test/helpers/LinkTokenTestHelper.sol:LinkTokenTestHelper', - roles.defaultAccount, - ) -}) - -describe('ChainlinkClientTestHelper', () => { - const specId = - '0x4c7b7ffb66b344fbaa64995af81e355a00000000000000000000000000000000' - let cc: Contract - let gs: Contract - let oc: Contract - let newoc: Contract - let link: Contract - - beforeEach(async () => { - link = await linkTokenFactory.connect(roles.defaultAccount).deploy() - oc = await operatorFactory - .connect(roles.defaultAccount) - .deploy(link.address, await roles.defaultAccount.getAddress()) - newoc = await operatorFactory - .connect(roles.defaultAccount) - .deploy(link.address, await roles.defaultAccount.getAddress()) - gs = await getterSetterFactory.connect(roles.defaultAccount).deploy() - cc = await concreteChainlinkClientFactory - .connect(roles.defaultAccount) - .deploy(link.address, oc.address) - }) - - describe('#newRequest', () => { - it('forwards the information to the oracle contract through the link token', async () => { - const tx = await cc.publicNewRequest( - specId, - gs.address, - ethers.utils.toUtf8Bytes('requestedBytes32(bytes32,bytes32)'), - ) - const receipt = await tx.wait() - - assert.equal(1, receipt.logs?.length) - const [jId, cbAddr, cbFId, cborData] = receipt.logs - ? decodeCCRequest(receipt.logs[0]) - : [] - const params = decodeDietCBOR(cborData ?? 
'') - - assert.equal(specId, jId) - assert.equal(gs.address, cbAddr) - assert.equal('0xed53e511', cbFId) - assert.deepEqual({}, params) - }) - }) - - describe('#chainlinkRequest(Request)', () => { - it('emits an event from the contract showing the run ID', async () => { - const tx = await cc.publicRequest( - specId, - cc.address, - ethers.utils.toUtf8Bytes('fulfillRequest(bytes32,bytes32)'), - 0, - ) - - const { events, logs } = await tx.wait() - - assert.equal(4, events?.length) - - assert.equal(logs?.[0].address, cc.address) - assert.equal(events?.[0].event, 'ChainlinkRequested') - }) - }) - - describe('#chainlinkRequestTo(Request)', () => { - it('emits an event from the contract showing the run ID', async () => { - const tx = await cc.publicRequestRunTo( - newoc.address, - specId, - cc.address, - ethers.utils.toUtf8Bytes('fulfillRequest(bytes32,bytes32)'), - 0, - ) - const { events } = await tx.wait() - - assert.equal(4, events?.length) - assert.equal(events?.[0].event, 'ChainlinkRequested') - }) - - it('emits an event on the target oracle contract', async () => { - const tx = await cc.publicRequestRunTo( - newoc.address, - specId, - cc.address, - ethers.utils.toUtf8Bytes('fulfillRequest(bytes32,bytes32)'), - 0, - ) - const { logs } = await tx.wait() - const event = logs && newoc.interface.parseLog(logs[3]) - - assert.equal(4, logs?.length) - assert.equal(event?.name, 'OracleRequest') - }) - - it('does not modify the stored oracle address', async () => { - await cc.publicRequestRunTo( - newoc.address, - specId, - cc.address, - ethers.utils.toUtf8Bytes('fulfillRequest(bytes32,bytes32)'), - 0, - ) - - const actualOracleAddress = await cc.publicOracleAddress() - assert.equal(oc.address, actualOracleAddress) - }) - }) - - describe('#requestOracleData', () => { - it('emits an event from the contract showing the run ID', async () => { - const tx = await cc.publicRequestOracleData( - specId, - ethers.utils.toUtf8Bytes('fulfillRequest(bytes32,bytes32)'), - 0, - ) - - const { events, logs } = await tx.wait() - - assert.equal(4, events?.length) - - assert.equal(logs?.[0].address, cc.address) - assert.equal(events?.[0].event, 'ChainlinkRequested') - }) - }) - - describe('#requestOracleDataFrom', () => { - it('emits an event from the contract showing the run ID', async () => { - const tx = await cc.publicRequestOracleDataFrom( - newoc.address, - specId, - ethers.utils.toUtf8Bytes('fulfillRequest(bytes32,bytes32)'), - 0, - ) - const { events } = await tx.wait() - - assert.equal(4, events?.length) - assert.equal(events?.[0].event, 'ChainlinkRequested') - }) - - it('emits an event on the target oracle contract', async () => { - const tx = await cc.publicRequestOracleDataFrom( - newoc.address, - specId, - ethers.utils.toUtf8Bytes('fulfillRequest(bytes32,bytes32)'), - 0, - ) - const { logs } = await tx.wait() - const event = logs && newoc.interface.parseLog(logs[3]) - - assert.equal(4, logs?.length) - assert.equal(event?.name, 'OracleRequest') - }) - - it('does not modify the stored oracle address', async () => { - await cc.publicRequestOracleDataFrom( - newoc.address, - specId, - ethers.utils.toUtf8Bytes('fulfillRequest(bytes32,bytes32)'), - 0, - ) - - const actualOracleAddress = await cc.publicOracleAddress() - assert.equal(oc.address, actualOracleAddress) - }) - }) - - describe('#cancelChainlinkRequest', () => { - let requestId: string - // a concrete chainlink attached to an empty oracle - let ecc: Contract - - beforeEach(async () => { - const emptyOracle = await emptyOracleFactory - 
.connect(roles.defaultAccount) - .deploy() - ecc = await concreteChainlinkClientFactory - .connect(roles.defaultAccount) - .deploy(link.address, emptyOracle.address) - - const tx = await ecc.publicRequest( - specId, - ecc.address, - ethers.utils.toUtf8Bytes('fulfillRequest(bytes32,bytes32)'), - 0, - ) - const { events } = await tx.wait() - requestId = (events?.[0]?.args as any).id - }) - - it('emits an event from the contract showing the run was cancelled', async () => { - const tx = await ecc.publicCancelRequest( - requestId, - 0, - ethers.utils.hexZeroPad('0x', 4), - 0, - ) - const { events } = await tx.wait() - - assert.equal(1, events?.length) - assert.equal(events?.[0].event, 'ChainlinkCancelled') - assert.equal(requestId, (events?.[0].args as any).id) - }) - - it('throws if given a bogus event ID', async () => { - await evmRevert( - ecc.publicCancelRequest( - ethers.utils.formatBytes32String('bogusId'), - 0, - ethers.utils.hexZeroPad('0x', 4), - 0, - ), - ) - }) - }) - - describe('#recordChainlinkFulfillment(modifier)', () => { - let request: RunRequest - - beforeEach(async () => { - await oc.setAuthorizedSenders([await roles.defaultAccount.getAddress()]) - const tx = await cc.publicRequest( - specId, - cc.address, - ethers.utils.toUtf8Bytes('fulfillRequest(bytes32,bytes32)'), - 0, - ) - const { logs } = await tx.wait() - - request = decodeRunRequest(logs?.[3]) - }) - - it('emits an event marking the request fulfilled', async () => { - const tx = await oc - .connect(roles.defaultAccount) - .fulfillOracleRequest( - ...convertFufillParams( - request, - ethers.utils.formatBytes32String('hi mom!'), - ), - ) - const { logs } = await tx.wait() - - const event = logs && cc.interface.parseLog(logs[1]) - - assert.equal(2, logs?.length) - assert.equal(event?.name, 'ChainlinkFulfilled') - assert.equal(request.requestId, event?.args.id) - }) - - it('should only allow one fulfillment per id', async () => { - await oc - .connect(roles.defaultAccount) - .fulfillOracleRequest( - ...convertFufillParams( - request, - ethers.utils.formatBytes32String('hi mom!'), - ), - ) - - await evmRevert( - oc - .connect(roles.defaultAccount) - .fulfillOracleRequest( - ...convertFufillParams( - request, - ethers.utils.formatBytes32String('hi mom!'), - ), - ), - 'Must have a valid requestId', - ) - }) - - it('should only allow the oracle to fulfill the request', async () => { - await evmRevert( - oc - .connect(roles.stranger) - .fulfillOracleRequest( - ...convertFufillParams( - request, - ethers.utils.formatBytes32String('hi mom!'), - ), - ), - 'Not authorized sender', - ) - }) - }) - - describe('#fulfillChainlinkRequest(function)', () => { - let request: RunRequest - - beforeEach(async () => { - await oc.setAuthorizedSenders([await roles.defaultAccount.getAddress()]) - const tx = await cc.publicRequest( - specId, - cc.address, - ethers.utils.toUtf8Bytes( - 'publicFulfillChainlinkRequest(bytes32,bytes32)', - ), - 0, - ) - const { logs } = await tx.wait() - - request = decodeRunRequest(logs?.[3]) - }) - - it('emits an event marking the request fulfilled', async () => { - await oc.setAuthorizedSenders([await roles.defaultAccount.getAddress()]) - const tx = await oc - .connect(roles.defaultAccount) - .fulfillOracleRequest( - ...convertFufillParams( - request, - ethers.utils.formatBytes32String('hi mom!'), - ), - ) - - const { logs } = await tx.wait() - const event = logs && cc.interface.parseLog(logs[1]) - - assert.equal(2, logs?.length) - assert.equal(event?.name, 'ChainlinkFulfilled') - assert.equal(request.requestId, 
event?.args?.id) - }) - - it('should only allow one fulfillment per id', async () => { - await oc - .connect(roles.defaultAccount) - .fulfillOracleRequest( - ...convertFufillParams( - request, - ethers.utils.formatBytes32String('hi mom!'), - ), - ) - - await evmRevert( - oc - .connect(roles.defaultAccount) - .fulfillOracleRequest( - ...convertFufillParams( - request, - ethers.utils.formatBytes32String('hi mom!'), - ), - ), - 'Must have a valid requestId', - ) - }) - - it('should only allow the oracle to fulfill the request', async () => { - await evmRevert( - oc - .connect(roles.stranger) - .fulfillOracleRequest( - ...convertFufillParams( - request, - ethers.utils.formatBytes32String('hi mom!'), - ), - ), - 'Not authorized sender', - ) - }) - }) - - describe('#chainlinkToken', () => { - it('returns the Link Token address', async () => { - const addr = await cc.publicChainlinkToken() - assert.equal(addr, link.address) - }) - }) - - describe('#addExternalRequest', () => { - let mock: Contract - let request: RunRequest - - beforeEach(async () => { - mock = await concreteChainlinkClientFactory - .connect(roles.defaultAccount) - .deploy(link.address, oc.address) - - const tx = await cc.publicRequest( - specId, - mock.address, - ethers.utils.toUtf8Bytes('fulfillRequest(bytes32,bytes32)'), - 0, - ) - const receipt = await tx.wait() - - request = decodeRunRequest(receipt.logs?.[3]) - await mock.publicAddExternalRequest(oc.address, request.requestId) - }) - - it('allows the external request to be fulfilled', async () => { - await oc.setAuthorizedSenders([await roles.defaultAccount.getAddress()]) - await oc.fulfillOracleRequest( - ...convertFufillParams( - request, - ethers.utils.formatBytes32String('hi mom!'), - ), - ) - }) - - it('does not allow the same requestId to be used', async () => { - await evmRevert( - cc.publicAddExternalRequest(newoc.address, request.requestId), - ) - }) - }) -}) diff --git a/contracts/test/v0.8/Flags.test.ts b/contracts/test/v0.8/Flags.test.ts deleted file mode 100644 index eff0912c9e1..00000000000 --- a/contracts/test/v0.8/Flags.test.ts +++ /dev/null @@ -1,405 +0,0 @@ -import { ethers } from 'hardhat' -import { publicAbi } from '../test-helpers/helpers' -import { assert, expect } from 'chai' -import { Contract, ContractFactory } from 'ethers' -import { Personas, getUsers } from '../test-helpers/setup' - -let personas: Personas - -let controllerFactory: ContractFactory -let flagsFactory: ContractFactory -let consumerFactory: ContractFactory - -let controller: Contract -let flags: Contract -let consumer: Contract - -before(async () => { - personas = (await getUsers()).personas - controllerFactory = await ethers.getContractFactory( - 'src/v0.8/shared/access/SimpleWriteAccessController.sol:SimpleWriteAccessController', - personas.Nelly, - ) - consumerFactory = await ethers.getContractFactory( - 'src/v0.8/tests/FlagsTestHelper.sol:FlagsTestHelper', - personas.Nelly, - ) - flagsFactory = await ethers.getContractFactory( - 'src/v0.8/Flags.sol:Flags', - personas.Nelly, - ) -}) - -describe('Flags', () => { - beforeEach(async () => { - controller = await controllerFactory.deploy() - flags = await flagsFactory.deploy(controller.address) - await flags.disableAccessCheck() - consumer = await consumerFactory.deploy(flags.address) - }) - - it('has a limited public interface [ @skip-coverage ]', async () => { - publicAbi(flags, [ - 'getFlag', - 'getFlags', - 'lowerFlags', - 'raiseFlag', - 'raiseFlags', - 'raisingAccessController', - 'setRaisingAccessController', - // Ownable methods: 
- 'acceptOwnership', - 'owner', - 'transferOwnership', - // AccessControl methods: - 'addAccess', - 'disableAccessCheck', - 'enableAccessCheck', - 'removeAccess', - 'checkEnabled', - 'hasAccess', - ]) - }) - - describe('#raiseFlag', () => { - describe('when called by the owner', () => { - it('updates the warning flag', async () => { - assert.equal(false, await flags.getFlag(consumer.address)) - - await flags.connect(personas.Nelly).raiseFlag(consumer.address) - - assert.equal(true, await flags.getFlag(consumer.address)) - }) - - it('emits an event log', async () => { - await expect(flags.connect(personas.Nelly).raiseFlag(consumer.address)) - .to.emit(flags, 'FlagRaised') - .withArgs(consumer.address) - }) - - describe('if a flag has already been raised', () => { - beforeEach(async () => { - await flags.connect(personas.Nelly).raiseFlag(consumer.address) - }) - - it('emits an event log', async () => { - const tx = await flags - .connect(personas.Nelly) - .raiseFlag(consumer.address) - const receipt = await tx.wait() - assert.equal(0, receipt.events?.length) - }) - }) - }) - - describe('when called by an enabled setter', () => { - beforeEach(async () => { - await controller - .connect(personas.Nelly) - .addAccess(await personas.Neil.getAddress()) - }) - - it('sets the flags', async () => { - await flags.connect(personas.Neil).raiseFlag(consumer.address), - assert.equal(true, await flags.getFlag(consumer.address)) - }) - }) - - describe('when called by a non-enabled setter', () => { - it('reverts', async () => { - await expect( - flags.connect(personas.Neil).raiseFlag(consumer.address), - ).to.be.revertedWith('Not allowed to raise flags') - }) - }) - - describe('when called when there is no raisingAccessController', () => { - beforeEach(async () => { - await expect( - flags - .connect(personas.Nelly) - .setRaisingAccessController( - '0x0000000000000000000000000000000000000000', - ), - ).to.emit(flags, 'RaisingAccessControllerUpdated') - assert.equal( - '0x0000000000000000000000000000000000000000', - await flags.raisingAccessController(), - ) - }) - - it('succeeds for the owner', async () => { - await flags.connect(personas.Nelly).raiseFlag(consumer.address) - assert.equal(true, await flags.getFlag(consumer.address)) - }) - - it('reverts for non-owner', async () => { - await expect(flags.connect(personas.Neil).raiseFlag(consumer.address)) - .to.be.reverted - }) - }) - }) - - describe('#raiseFlags', () => { - describe('when called by the owner', () => { - it('updates the warning flag', async () => { - assert.equal(false, await flags.getFlag(consumer.address)) - - await flags.connect(personas.Nelly).raiseFlags([consumer.address]) - - assert.equal(true, await flags.getFlag(consumer.address)) - }) - - it('emits an event log', async () => { - await expect( - flags.connect(personas.Nelly).raiseFlags([consumer.address]), - ) - .to.emit(flags, 'FlagRaised') - .withArgs(consumer.address) - }) - - describe('if a flag has already been raised', () => { - beforeEach(async () => { - await flags.connect(personas.Nelly).raiseFlags([consumer.address]) - }) - - it('emits an event log', async () => { - const tx = await flags - .connect(personas.Nelly) - .raiseFlags([consumer.address]) - const receipt = await tx.wait() - assert.equal(0, receipt.events?.length) - }) - }) - }) - - describe('when called by an enabled setter', () => { - beforeEach(async () => { - await controller - .connect(personas.Nelly) - .addAccess(await personas.Neil.getAddress()) - }) - - it('sets the flags', async () => { - await 
flags.connect(personas.Neil).raiseFlags([consumer.address]), - assert.equal(true, await flags.getFlag(consumer.address)) - }) - }) - - describe('when called by a non-enabled setter', () => { - it('reverts', async () => { - await expect( - flags.connect(personas.Neil).raiseFlags([consumer.address]), - ).to.be.revertedWith('Not allowed to raise flags') - }) - }) - - describe('when called when there is no raisingAccessController', () => { - beforeEach(async () => { - await expect( - flags - .connect(personas.Nelly) - .setRaisingAccessController( - '0x0000000000000000000000000000000000000000', - ), - ).to.emit(flags, 'RaisingAccessControllerUpdated') - - assert.equal( - '0x0000000000000000000000000000000000000000', - await flags.raisingAccessController(), - ) - }) - - it('succeeds for the owner', async () => { - await flags.connect(personas.Nelly).raiseFlags([consumer.address]) - assert.equal(true, await flags.getFlag(consumer.address)) - }) - - it('reverts for non-owners', async () => { - await expect( - flags.connect(personas.Neil).raiseFlags([consumer.address]), - ).to.be.reverted - }) - }) - }) - - describe('#lowerFlags', () => { - beforeEach(async () => { - await flags.connect(personas.Nelly).raiseFlags([consumer.address]) - }) - - describe('when called by the owner', () => { - it('updates the warning flag', async () => { - assert.equal(true, await flags.getFlag(consumer.address)) - - await flags.connect(personas.Nelly).lowerFlags([consumer.address]) - - assert.equal(false, await flags.getFlag(consumer.address)) - }) - - it('emits an event log', async () => { - await expect( - flags.connect(personas.Nelly).lowerFlags([consumer.address]), - ) - .to.emit(flags, 'FlagLowered') - .withArgs(consumer.address) - }) - - describe('if a flag has already been raised', () => { - beforeEach(async () => { - await flags.connect(personas.Nelly).lowerFlags([consumer.address]) - }) - - it('emits an event log', async () => { - const tx = await flags - .connect(personas.Nelly) - .lowerFlags([consumer.address]) - const receipt = await tx.wait() - assert.equal(0, receipt.events?.length) - }) - }) - }) - - describe('when called by a non-owner', () => { - it('reverts', async () => { - await expect( - flags.connect(personas.Neil).lowerFlags([consumer.address]), - ).to.be.revertedWith('Only callable by owner') - }) - }) - }) - - describe('#getFlag', () => { - describe('if the access control is turned on', () => { - beforeEach(async () => { - await flags.connect(personas.Nelly).enableAccessCheck() - }) - - it('reverts', async () => { - await expect(consumer.getFlag(consumer.address)).to.be.revertedWith( - 'No access', - ) - }) - - describe('if access is granted to the address', () => { - beforeEach(async () => { - await flags.connect(personas.Nelly).addAccess(consumer.address) - }) - - it('does not revert', async () => { - await consumer.getFlag(consumer.address) - }) - }) - }) - - describe('if the access control is turned off', () => { - beforeEach(async () => { - await flags.connect(personas.Nelly).disableAccessCheck() - }) - - it('does not revert', async () => { - await consumer.getFlag(consumer.address) - }) - - describe('if access is granted to the address', () => { - beforeEach(async () => { - await flags.connect(personas.Nelly).addAccess(consumer.address) - }) - - it('does not revert', async () => { - await consumer.getFlag(consumer.address) - }) - }) - }) - }) - - describe('#getFlags', () => { - beforeEach(async () => { - await flags.connect(personas.Nelly).disableAccessCheck() - await flags - 
.connect(personas.Nelly) - .raiseFlags([ - await personas.Neil.getAddress(), - await personas.Norbert.getAddress(), - ]) - }) - - it('respects the access controls of #getFlag', async () => { - await flags.connect(personas.Nelly).enableAccessCheck() - - await expect(consumer.getFlag(consumer.address)).to.be.revertedWith( - 'No access', - ) - - await flags.connect(personas.Nelly).addAccess(consumer.address) - - await consumer.getFlag(consumer.address) - }) - - it('returns the flags in the order they are requested', async () => { - const response = await consumer.getFlags([ - await personas.Nelly.getAddress(), - await personas.Neil.getAddress(), - await personas.Ned.getAddress(), - await personas.Norbert.getAddress(), - ]) - - assert.deepEqual([false, true, false, true], response) - }) - }) - - describe('#setRaisingAccessController', () => { - let controller2: Contract - - beforeEach(async () => { - controller2 = await controllerFactory.connect(personas.Nelly).deploy() - await controller2.connect(personas.Nelly).enableAccessCheck() - }) - - it('updates access control rules', async () => { - const neilAddress = await personas.Neil.getAddress() - await controller.connect(personas.Nelly).addAccess(neilAddress) - await flags.connect(personas.Neil).raiseFlags([consumer.address]) // doesn't raise - - await flags - .connect(personas.Nelly) - .setRaisingAccessController(controller2.address) - - await expect( - flags.connect(personas.Neil).raiseFlags([consumer.address]), - ).to.be.revertedWith('Not allowed to raise flags') - }) - - it('emits a log announcing the change', async () => { - await expect( - flags - .connect(personas.Nelly) - .setRaisingAccessController(controller2.address), - ) - .to.emit(flags, 'RaisingAccessControllerUpdated') - .withArgs(controller.address, controller2.address) - }) - - it('does not emit a log when there is no change', async () => { - await flags - .connect(personas.Nelly) - .setRaisingAccessController(controller2.address) - - await expect( - flags - .connect(personas.Nelly) - .setRaisingAccessController(controller2.address), - ).to.not.emit(flags, 'RaisingAccessControllerUpdated') - }) - - describe('when called by a non-owner', () => { - it('reverts', async () => { - await expect( - flags - .connect(personas.Neil) - .setRaisingAccessController(controller2.address), - ).to.be.revertedWith('Only callable by owner') - }) - }) - }) -}) diff --git a/contracts/test/v0.8/HeartbeatRequester.test.ts b/contracts/test/v0.8/HeartbeatRequester.test.ts deleted file mode 100644 index bb58192337d..00000000000 --- a/contracts/test/v0.8/HeartbeatRequester.test.ts +++ /dev/null @@ -1,142 +0,0 @@ -import { getUsers, Personas } from '../test-helpers/setup' -import { ethers } from 'hardhat' -import { Signer } from 'ethers' -import { - HeartbeatRequester, - MockAggregatorProxy, - MockOffchainAggregator, -} from '../../typechain' -import { HeartbeatRequester__factory as HeartbeatRequesterFactory } from '../../typechain/factories/HeartbeatRequester__factory' -import { MockAggregatorProxy__factory as MockAggregatorProxyFactory } from '../../typechain/factories/MockAggregatorProxy__factory' -import { MockOffchainAggregator__factory as MockOffchainAggregatorFactory } from '../../typechain/factories/MockOffchainAggregator__factory' -import { assert, expect } from 'chai' - -let personas: Personas -let owner: Signer -let caller1: Signer -let proxy1: Signer -let proxy2: Signer -let aggregator: MockOffchainAggregator -let aggregatorFactory: MockOffchainAggregatorFactory -let aggregatorProxy: 
MockAggregatorProxy -let aggregatorProxyFactory: MockAggregatorProxyFactory -let requester: HeartbeatRequester -let requesterFactory: HeartbeatRequesterFactory - -describe('HeartbeatRequester', () => { - beforeEach(async () => { - personas = (await getUsers()).personas - owner = personas.Default - caller1 = personas.Carol - proxy1 = personas.Nelly - proxy2 = personas.Eddy - - // deploy heartbeat requester - requesterFactory = await ethers.getContractFactory('HeartbeatRequester') - requester = await requesterFactory.connect(owner).deploy() - await requester.deployed() - }) - - describe('#permitHeartbeat', () => { - it('adds a heartbeat and emits an event', async () => { - const callerAddress = await caller1.getAddress() - const proxyAddress1 = await proxy1.getAddress() - const proxyAddress2 = await proxy2.getAddress() - const tx1 = await requester - .connect(owner) - .permitHeartbeat(callerAddress, proxyAddress1) - await expect(tx1) - .to.emit(requester, 'HeartbeatPermitted') - .withArgs(callerAddress, proxyAddress1, ethers.constants.AddressZero) - - const tx2 = await requester - .connect(owner) - .permitHeartbeat(callerAddress, proxyAddress2) - await expect(tx2) - .to.emit(requester, 'HeartbeatPermitted') - .withArgs(callerAddress, proxyAddress2, proxyAddress1) - }) - - it('reverts when not called by its owner', async () => { - const callerAddress = await caller1.getAddress() - const proxyAddress = await proxy1.getAddress() - await expect( - requester.connect(caller1).permitHeartbeat(callerAddress, proxyAddress), - ).to.be.revertedWith('Only callable by owner') - }) - }) - - describe('#removeHeartbeat', () => { - it('removes a heartbeat and emits an event', async () => { - const callerAddress = await caller1.getAddress() - const proxyAddress = await proxy1.getAddress() - const tx1 = await requester - .connect(owner) - .permitHeartbeat(callerAddress, proxyAddress) - await expect(tx1) - .to.emit(requester, 'HeartbeatPermitted') - .withArgs(callerAddress, proxyAddress, ethers.constants.AddressZero) - - const tx2 = await requester.connect(owner).removeHeartbeat(callerAddress) - await expect(tx2) - .to.emit(requester, 'HeartbeatRemoved') - .withArgs(callerAddress, proxyAddress) - }) - - it('reverts when not called by its owner', async () => { - await expect( - requester.connect(caller1).removeHeartbeat(await caller1.getAddress()), - ).to.be.revertedWith('Only callable by owner') - }) - }) - - describe('#getAggregatorAndRequestHeartbeat', () => { - it('reverts if caller and proxy combination is not allowed', async () => { - const callerAddress = await caller1.getAddress() - const proxyAddress = await proxy1.getAddress() - await requester - .connect(owner) - .permitHeartbeat(callerAddress, proxyAddress) - - await expect( - requester - .connect(caller1) - .getAggregatorAndRequestHeartbeat(await owner.getAddress()), - ).to.be.revertedWithCustomError(requester, 'HeartbeatNotPermitted') - }) - - it('calls corresponding aggregator to request a new round', async () => { - aggregatorFactory = await ethers.getContractFactory( - 'MockOffchainAggregator', - ) - aggregator = await aggregatorFactory.connect(owner).deploy() - await aggregator.deployed() - - aggregatorProxyFactory = await ethers.getContractFactory( - 'MockAggregatorProxy', - ) - aggregatorProxy = await aggregatorProxyFactory - .connect(owner) - .deploy(aggregator.address) - await aggregatorProxy.deployed() - - await requester - .connect(owner) - .permitHeartbeat(await caller1.getAddress(), aggregatorProxy.address) - - const tx1 = await 
requester - .connect(caller1) - .getAggregatorAndRequestHeartbeat(aggregatorProxy.address) - - await expect(tx1).to.emit(aggregator, 'RoundIdUpdated').withArgs(1) - assert.equal((await aggregator.roundId()).toNumber(), 1) - - const tx2 = await requester - .connect(caller1) - .getAggregatorAndRequestHeartbeat(aggregatorProxy.address) - - await expect(tx2).to.emit(aggregator, 'RoundIdUpdated').withArgs(2) - assert.equal((await aggregator.roundId()).toNumber(), 2) - }) - }) -}) diff --git a/contracts/test/v0.8/PermissionedForwardProxy.test.ts b/contracts/test/v0.8/PermissionedForwardProxy.test.ts deleted file mode 100644 index 12ce63cd9b4..00000000000 --- a/contracts/test/v0.8/PermissionedForwardProxy.test.ts +++ /dev/null @@ -1,176 +0,0 @@ -import { ethers } from 'hardhat' -import { publicAbi } from '../test-helpers/helpers' -import { assert, expect } from 'chai' -import { Contract, ContractFactory } from 'ethers' -import { getUsers, Personas } from '../test-helpers/setup' - -const PERMISSION_NOT_SET = 'PermissionNotSet' - -let personas: Personas - -let controllerFactory: ContractFactory -let counterFactory: ContractFactory -let controller: Contract -let counter: Contract - -before(async () => { - personas = (await getUsers()).personas - controllerFactory = await ethers.getContractFactory( - 'src/v0.8/PermissionedForwardProxy.sol:PermissionedForwardProxy', - personas.Carol, - ) - counterFactory = await ethers.getContractFactory( - 'src/v0.8/tests/Counter.sol:Counter', - personas.Carol, - ) -}) - -describe('PermissionedForwardProxy', () => { - beforeEach(async () => { - controller = await controllerFactory.connect(personas.Carol).deploy() - counter = await counterFactory.connect(personas.Carol).deploy() - }) - - it('has a limited public interface [ @skip-coverage ]', async () => { - publicAbi(controller, [ - 'forward', - 'setPermission', - 'removePermission', - 'getPermission', - // Owned - 'acceptOwnership', - 'owner', - 'transferOwnership', - ]) - }) - - describe('#setPermission', () => { - describe('when called by a non-owner', () => { - it('reverts', async () => { - await expect( - controller - .connect(personas.Eddy) - .setPermission( - await personas.Carol.getAddress(), - await personas.Eddy.getAddress(), - ), - ).to.be.revertedWith('Only callable by owner') - }) - }) - - describe('when called by the owner', () => { - it('adds the permission to the proxy', async () => { - const tx = await controller - .connect(personas.Carol) - .setPermission( - await personas.Carol.getAddress(), - await personas.Eddy.getAddress(), - ) - const receipt = await tx.wait() - const eventLog = receipt?.events - - assert.equal(eventLog?.length, 1) - assert.equal(eventLog?.[0].event, 'PermissionSet') - assert.equal(eventLog?.[0].args?.[0], await personas.Carol.getAddress()) - assert.equal(eventLog?.[0].args?.[1], await personas.Eddy.getAddress()) - - expect( - await controller.getPermission(await personas.Carol.getAddress()), - ).to.be.equal(await personas.Eddy.getAddress()) - }) - }) - }) - - describe('#removePermission', () => { - beforeEach(async () => { - // Add permission before testing - await controller - .connect(personas.Carol) - .setPermission( - await personas.Carol.getAddress(), - await personas.Eddy.getAddress(), - ) - }) - - describe('when called by a non-owner', () => { - it('reverts', async () => { - await expect( - controller - .connect(personas.Eddy) - .removePermission(await personas.Carol.getAddress()), - ).to.be.revertedWith('Only callable by owner') - }) - }) - - describe('when called by 
the owner', () => { - it('removes the permission to the proxy', async () => { - const tx = await controller - .connect(personas.Carol) - .removePermission(await personas.Carol.getAddress()) - - const receipt = await tx.wait() - const eventLog = receipt?.events - - assert.equal(eventLog?.length, 1) - assert.equal(eventLog?.[0].event, 'PermissionRemoved') - assert.equal(eventLog?.[0].args?.[0], await personas.Carol.getAddress()) - - expect( - await controller.getPermission(await personas.Carol.getAddress()), - ).to.be.equal(ethers.constants.AddressZero) - }) - }) - }) - - describe('#forward', () => { - describe('when permission does not exist', () => { - it('reverts', async () => { - await expect( - controller - .connect(personas.Carol) - .forward(await personas.Eddy.getAddress(), '0x'), - ).to.be.revertedWithCustomError(controller, PERMISSION_NOT_SET) - }) - }) - - describe('when permission exists', () => { - beforeEach(async () => { - // Add permission before testing - await controller - .connect(personas.Carol) - .setPermission(await personas.Carol.getAddress(), counter.address) - }) - - it('calls target successfully', async () => { - await controller - .connect(personas.Carol) - .forward( - counter.address, - counter.interface.encodeFunctionData('increment'), - ) - - expect(await counter.count()).to.be.equal(1) - }) - - it('reverts when target reverts and bubbles up error', async () => { - await expect( - controller - .connect(personas.Carol) - .forward( - counter.address, - counter.interface.encodeFunctionData('alwaysRevertWithString'), - ), - ).to.be.revertedWith('always revert') // Revert strings should be bubbled up - - await expect( - controller - .connect(personas.Carol) - .forward( - counter.address, - counter.interface.encodeFunctionData('alwaysRevert'), - ), - ).to.be.reverted // Javascript VM not able to parse custom errors defined on another contract - }) - }) - }) -}) diff --git a/contracts/test/v0.8/ValidatorProxy.test.ts b/contracts/test/v0.8/ValidatorProxy.test.ts deleted file mode 100644 index 2d274245de4..00000000000 --- a/contracts/test/v0.8/ValidatorProxy.test.ts +++ /dev/null @@ -1,403 +0,0 @@ -import { ethers } from 'hardhat' -import { publicAbi } from '../test-helpers/helpers' -import { assert, expect } from 'chai' -import { Signer, Contract, constants } from 'ethers' -import { Users, getUsers } from '../test-helpers/setup' - -let users: Users - -let owner: Signer -let ownerAddress: string -let aggregator: Signer -let aggregatorAddress: string -let validator: Signer -let validatorAddress: string -let validatorProxy: Contract - -before(async () => { - users = await getUsers() - owner = users.personas.Default - aggregator = users.contracts.contract1 - validator = users.contracts.contract2 - ownerAddress = await owner.getAddress() - aggregatorAddress = await aggregator.getAddress() - validatorAddress = await validator.getAddress() -}) - -describe('ValidatorProxy', () => { - beforeEach(async () => { - const vpf = await ethers.getContractFactory( - 'src/v0.8/ValidatorProxy.sol:ValidatorProxy', - owner, - ) - validatorProxy = await vpf.deploy(aggregatorAddress, validatorAddress) - validatorProxy = await validatorProxy.deployed() - }) - - it('has a limited public interface [ @skip-coverage ]', async () => { - publicAbi(validatorProxy, [ - // ConfirmedOwner functions - 'acceptOwnership', - 'owner', - 'transferOwnership', - // ValidatorProxy functions - 'validate', - 'proposeNewAggregator', - 'upgradeAggregator', - 'getAggregators', - 'proposeNewValidator', - 
'upgradeValidator', - 'getValidators', - 'typeAndVersion', - ]) - }) - - describe('#constructor', () => { - it('should set the aggregator addresses correctly', async () => { - const response = await validatorProxy.getAggregators() - assert.equal(response.current, aggregatorAddress) - assert.equal(response.hasProposal, false) - assert.equal(response.proposed, constants.AddressZero) - }) - - it('should set the validator addresses conrrectly', async () => { - const response = await validatorProxy.getValidators() - assert.equal(response.current, validatorAddress) - assert.equal(response.hasProposal, false) - assert.equal(response.proposed, constants.AddressZero) - }) - - it('should set the owner correctly', async () => { - const response = await validatorProxy.owner() - assert.equal(response, ownerAddress) - }) - }) - - describe('#proposeNewAggregator', () => { - let newAggregator: Signer - let newAggregatorAddress: string - beforeEach(async () => { - newAggregator = users.contracts.contract3 - newAggregatorAddress = await newAggregator.getAddress() - }) - - describe('failure', () => { - it('should only be called by the owner', async () => { - const stranger = users.contracts.contract4 - await expect( - validatorProxy - .connect(stranger) - .proposeNewAggregator(newAggregatorAddress), - ).to.be.revertedWith('Only callable by owner') - }) - - it('should revert if no change in proposal', async () => { - await validatorProxy.proposeNewAggregator(newAggregatorAddress) - await expect( - validatorProxy.proposeNewAggregator(newAggregatorAddress), - ).to.be.revertedWith('Invalid proposal') - }) - - it('should revert if the proposal is the same as the current', async () => { - await expect( - validatorProxy.proposeNewAggregator(aggregatorAddress), - ).to.be.revertedWith('Invalid proposal') - }) - }) - - describe('success', () => { - it('should emit an event', async () => { - await expect(validatorProxy.proposeNewAggregator(newAggregatorAddress)) - .to.emit(validatorProxy, 'AggregatorProposed') - .withArgs(newAggregatorAddress) - }) - - it('should set the correct address and hasProposal is true', async () => { - await validatorProxy.proposeNewAggregator(newAggregatorAddress) - const response = await validatorProxy.getAggregators() - assert.equal(response.current, aggregatorAddress) - assert.equal(response.hasProposal, true) - assert.equal(response.proposed, newAggregatorAddress) - }) - - it('should set a zero address and hasProposal is false', async () => { - await validatorProxy.proposeNewAggregator(newAggregatorAddress) - await validatorProxy.proposeNewAggregator(constants.AddressZero) - const response = await validatorProxy.getAggregators() - assert.equal(response.current, aggregatorAddress) - assert.equal(response.hasProposal, false) - assert.equal(response.proposed, constants.AddressZero) - }) - }) - }) - - describe('#upgradeAggregator', () => { - describe('failure', () => { - it('should only be called by the owner', async () => { - const stranger = users.contracts.contract4 - await expect( - validatorProxy.connect(stranger).upgradeAggregator(), - ).to.be.revertedWith('Only callable by owner') - }) - - it('should revert if there is no proposal', async () => { - await expect(validatorProxy.upgradeAggregator()).to.be.revertedWith( - 'No proposal', - ) - }) - }) - - describe('success', () => { - let newAggregator: Signer - let newAggregatorAddress: string - beforeEach(async () => { - newAggregator = users.contracts.contract3 - newAggregatorAddress = await newAggregator.getAddress() - await 
validatorProxy.proposeNewAggregator(newAggregatorAddress) - }) - - it('should emit an event', async () => { - await expect(validatorProxy.upgradeAggregator()) - .to.emit(validatorProxy, 'AggregatorUpgraded') - .withArgs(aggregatorAddress, newAggregatorAddress) - }) - - it('should upgrade the addresses', async () => { - await validatorProxy.upgradeAggregator() - const response = await validatorProxy.getAggregators() - assert.equal(response.current, newAggregatorAddress) - assert.equal(response.hasProposal, false) - assert.equal(response.proposed, constants.AddressZero) - }) - }) - }) - - describe('#proposeNewValidator', () => { - let newValidator: Signer - let newValidatorAddress: string - - beforeEach(async () => { - newValidator = users.contracts.contract3 - newValidatorAddress = await newValidator.getAddress() - }) - - describe('failure', () => { - it('should only be called by the owner', async () => { - const stranger = users.contracts.contract4 - await expect( - validatorProxy - .connect(stranger) - .proposeNewAggregator(newValidatorAddress), - ).to.be.revertedWith('Only callable by owner') - }) - - it('should revert if no change in proposal', async () => { - await validatorProxy.proposeNewValidator(newValidatorAddress) - await expect( - validatorProxy.proposeNewValidator(newValidatorAddress), - ).to.be.revertedWith('Invalid proposal') - }) - - it('should revert if the proposal is the same as the current', async () => { - await expect( - validatorProxy.proposeNewValidator(validatorAddress), - ).to.be.revertedWith('Invalid proposal') - }) - }) - - describe('success', () => { - it('should emit an event', async () => { - await expect(validatorProxy.proposeNewValidator(newValidatorAddress)) - .to.emit(validatorProxy, 'ValidatorProposed') - .withArgs(newValidatorAddress) - }) - - it('should set the correct address and hasProposal is true', async () => { - await validatorProxy.proposeNewValidator(newValidatorAddress) - const response = await validatorProxy.getValidators() - assert.equal(response.current, validatorAddress) - assert.equal(response.hasProposal, true) - assert.equal(response.proposed, newValidatorAddress) - }) - - it('should set a zero address and hasProposal is false', async () => { - await validatorProxy.proposeNewValidator(newValidatorAddress) - await validatorProxy.proposeNewValidator(constants.AddressZero) - const response = await validatorProxy.getValidators() - assert.equal(response.current, validatorAddress) - assert.equal(response.hasProposal, false) - assert.equal(response.proposed, constants.AddressZero) - }) - }) - }) - - describe('#upgradeValidator', () => { - describe('failure', () => { - it('should only be called by the owner', async () => { - const stranger = users.contracts.contract4 - await expect( - validatorProxy.connect(stranger).upgradeValidator(), - ).to.be.revertedWith('Only callable by owner') - }) - - it('should revert if there is no proposal', async () => { - await expect(validatorProxy.upgradeValidator()).to.be.revertedWith( - 'No proposal', - ) - }) - }) - - describe('success', () => { - let newValidator: Signer - let newValidatorAddress: string - beforeEach(async () => { - newValidator = users.contracts.contract3 - newValidatorAddress = await newValidator.getAddress() - await validatorProxy.proposeNewValidator(newValidatorAddress) - }) - - it('should emit an event', async () => { - await expect(validatorProxy.upgradeValidator()) - .to.emit(validatorProxy, 'ValidatorUpgraded') - .withArgs(validatorAddress, newValidatorAddress) - }) - - it('should 
upgrade the addresses', async () => { - await validatorProxy.upgradeValidator() - const response = await validatorProxy.getValidators() - assert.equal(response.current, newValidatorAddress) - assert.equal(response.hasProposal, false) - assert.equal(response.proposed, constants.AddressZero) - }) - }) - }) - - describe('#validate', () => { - describe('failure', () => { - it('reverts when not called by aggregator or proposed aggregator', async () => { - const stranger = users.contracts.contract5 - await expect( - validatorProxy.connect(stranger).validate(99, 88, 77, 66), - ).to.be.revertedWith('Not a configured aggregator') - }) - - it('reverts when there is no validator set', async () => { - const vpf = await ethers.getContractFactory( - 'src/v0.8/ValidatorProxy.sol:ValidatorProxy', - owner, - ) - validatorProxy = await vpf.deploy( - aggregatorAddress, - constants.AddressZero, - ) - await validatorProxy.deployed() - await expect( - validatorProxy.connect(aggregator).validate(99, 88, 77, 66), - ).to.be.revertedWith('No validator set') - }) - }) - - describe('success', () => { - describe('from the aggregator', () => { - let mockValidator1: Contract - beforeEach(async () => { - const mvf = await ethers.getContractFactory( - 'src/v0.8/mocks/MockAggregatorValidator.sol:MockAggregatorValidator', - owner, - ) - mockValidator1 = await mvf.deploy(1) - mockValidator1 = await mockValidator1.deployed() - const vpf = await ethers.getContractFactory( - 'src/v0.8/ValidatorProxy.sol:ValidatorProxy', - owner, - ) - validatorProxy = await vpf.deploy( - aggregatorAddress, - mockValidator1.address, - ) - validatorProxy = await validatorProxy.deployed() - }) - - describe('for a single validator', () => { - it('calls validate on the validator', async () => { - await expect( - validatorProxy.connect(aggregator).validate(200, 300, 400, 500), - ) - .to.emit(mockValidator1, 'ValidateCalled') - .withArgs(1, 200, 300, 400, 500) - }) - - it('uses a specific amount of gas [ @skip-coverage ]', async () => { - const resp = await validatorProxy - .connect(aggregator) - .validate(200, 300, 400, 500) - const receipt = await resp.wait() - assert.equal(receipt.gasUsed.toString(), '32373') - }) - }) - - describe('for a validator and a proposed validator', () => { - let mockValidator2: Contract - - beforeEach(async () => { - const mvf = await ethers.getContractFactory( - 'src/v0.8/mocks/MockAggregatorValidator.sol:MockAggregatorValidator', - owner, - ) - mockValidator2 = await mvf.deploy(2) - mockValidator2 = await mockValidator2.deployed() - await validatorProxy.proposeNewValidator(mockValidator2.address) - }) - - it('calls validate on the validator', async () => { - await expect( - validatorProxy - .connect(aggregator) - .validate(2000, 3000, 4000, 5000), - ) - .to.emit(mockValidator1, 'ValidateCalled') - .withArgs(1, 2000, 3000, 4000, 5000) - }) - - it('also calls validate on the proposed validator', async () => { - await expect( - validatorProxy - .connect(aggregator) - .validate(2000, 3000, 4000, 5000), - ) - .to.emit(mockValidator2, 'ValidateCalled') - .withArgs(2, 2000, 3000, 4000, 5000) - }) - - it('uses a specific amount of gas [ @skip-coverage ]', async () => { - const resp = await validatorProxy - .connect(aggregator) - .validate(2000, 3000, 4000, 5000) - const receipt = await resp.wait() - assert.equal(receipt.gasUsed.toString(), '40429') - }) - }) - }) - - describe('from the proposed aggregator', () => { - let newAggregator: Signer - let newAggregatorAddress: string - beforeEach(async () => { - newAggregator = 
users.contracts.contract3 - newAggregatorAddress = await newAggregator.getAddress() - await validatorProxy - .connect(owner) - .proposeNewAggregator(newAggregatorAddress) - }) - - it('emits an event', async () => { - await expect( - validatorProxy.connect(newAggregator).validate(555, 666, 777, 888), - ) - .to.emit(validatorProxy, 'ProposedAggregatorValidateCall') - .withArgs(newAggregatorAddress, 555, 666, 777, 888) - }) - }) - }) - }) -}) diff --git a/contracts/test/v0.8/automation/AutomationGasAnalysis.test.ts b/contracts/test/v0.8/automation/AutomationGasAnalysis.test.ts index c2e08f4cd81..f393a5de1c2 100644 --- a/contracts/test/v0.8/automation/AutomationGasAnalysis.test.ts +++ b/contracts/test/v0.8/automation/AutomationGasAnalysis.test.ts @@ -69,7 +69,7 @@ describeMaybe('Automation Gas Analysis', () => { const getFact = ethers.getContractFactory const linkTokenFactory = await getFact('LinkToken') const mockV3AggregatorFactory = await getFact( - 'src/v0.8/tests/MockV3Aggregator.sol:MockV3Aggregator', + 'src/v0.8/shared/mocks/MockV3Aggregator.sol:MockV3Aggregator', ) const upkeepMockFactory = await getFact('UpkeepMock') const registry12Factory = await getFact('KeeperRegistry1_2') diff --git a/contracts/test/v0.8/automation/AutomationRegistrar2_1.test.ts b/contracts/test/v0.8/automation/AutomationRegistrar2_1.test.ts index a096ee4f481..6d3d591acb0 100644 --- a/contracts/test/v0.8/automation/AutomationRegistrar2_1.test.ts +++ b/contracts/test/v0.8/automation/AutomationRegistrar2_1.test.ts @@ -41,7 +41,7 @@ describe('AutomationRegistrar2_1 - Frozen [ @skip-coverage ]', () => { // 'src/v0.8/shared/test/helpers/LinkTokenTestHelper.sol:LinkTokenTestHelper', // ) // mockV3AggregatorFactory = (await ethers.getContractFactory( -// 'src/v0.8/tests/MockV3Aggregator.sol:MockV3Aggregator', +// 'src/v0.8/shared/mocks/MockV3Aggregator.sol:MockV3Aggregator', // )) as unknown as MockV3AggregatorFactory // upkeepMockFactory = await ethers.getContractFactory('UpkeepMock') // }) diff --git a/contracts/test/v0.8/automation/AutomationRegistrar2_3.test.ts b/contracts/test/v0.8/automation/AutomationRegistrar2_3.test.ts index 31712e1380b..e98218ec214 100644 --- a/contracts/test/v0.8/automation/AutomationRegistrar2_3.test.ts +++ b/contracts/test/v0.8/automation/AutomationRegistrar2_3.test.ts @@ -44,7 +44,7 @@ before(async () => { 'src/v0.8/shared/test/helpers/LinkTokenTestHelper.sol:LinkTokenTestHelper', ) mockV3AggregatorFactory = (await ethers.getContractFactory( - 'src/v0.8/tests/MockV3Aggregator.sol:MockV3Aggregator', + 'src/v0.8/shared/mocks/MockV3Aggregator.sol:MockV3Aggregator', )) as unknown as MockV3AggregatorFactory upkeepMockFactory = await ethers.getContractFactory('UpkeepMock') }) diff --git a/contracts/test/v0.8/automation/AutomationRegistry2_2.test.ts b/contracts/test/v0.8/automation/AutomationRegistry2_2.test.ts index 6b220f2f7cb..593ac08a5e7 100644 --- a/contracts/test/v0.8/automation/AutomationRegistry2_2.test.ts +++ b/contracts/test/v0.8/automation/AutomationRegistry2_2.test.ts @@ -419,7 +419,7 @@ describe('AutomationRegistry2_2', () => { ) // need full path because there are two contracts with name MockV3Aggregator mockV3AggregatorFactory = (await ethers.getContractFactory( - 'src/v0.8/tests/MockV3Aggregator.sol:MockV3Aggregator', + 'src/v0.8/shared/mocks/MockV3Aggregator.sol:MockV3Aggregator', )) as unknown as MockV3AggregatorFactory mockArbGasInfoFactory = await ethers.getContractFactory('MockArbGasInfo') mockOVMGasPriceOracleFactory = await ethers.getContractFactory( diff --git 
a/contracts/test/v0.8/automation/AutomationRegistry2_3.test.ts b/contracts/test/v0.8/automation/AutomationRegistry2_3.test.ts index f3c2d9bb984..48ec8469f9a 100644 --- a/contracts/test/v0.8/automation/AutomationRegistry2_3.test.ts +++ b/contracts/test/v0.8/automation/AutomationRegistry2_3.test.ts @@ -431,7 +431,7 @@ describe('AutomationRegistry2_3', () => { ) // need full path because there are two contracts with name MockV3Aggregator mockV3AggregatorFactory = (await ethers.getContractFactory( - 'src/v0.8/tests/MockV3Aggregator.sol:MockV3Aggregator', + 'src/v0.8/shared/mocks/MockV3Aggregator.sol:MockV3Aggregator', )) as unknown as MockV3AggregatorFactory mockArbGasInfoFactory = await ethers.getContractFactory('MockArbGasInfo') mockOVMGasPriceOracleFactory = await ethers.getContractFactory( diff --git a/contracts/test/v0.8/automation/KeeperCompatible.test.ts b/contracts/test/v0.8/automation/KeeperCompatible.test.ts index 13d1d0deff5..17c83790811 100644 --- a/contracts/test/v0.8/automation/KeeperCompatible.test.ts +++ b/contracts/test/v0.8/automation/KeeperCompatible.test.ts @@ -10,7 +10,7 @@ describe('KeeperCompatible', () => { before(async () => { const factory = await ethers.getContractFactory( - `src/v0.${version}/tests/KeeperCompatibleTestHelper.sol:KeeperCompatibleTestHelper`, + `src/v0.${version}/automation/testhelpers/KeeperCompatibleTestHelper.sol:KeeperCompatibleTestHelper`, ) contract = await factory.deploy() }) diff --git a/contracts/test/v0.8/automation/UpkeepTranscoder3_0.test.ts b/contracts/test/v0.8/automation/UpkeepTranscoder3_0.test.ts index d58cfd377f7..7fd811d8226 100644 --- a/contracts/test/v0.8/automation/UpkeepTranscoder3_0.test.ts +++ b/contracts/test/v0.8/automation/UpkeepTranscoder3_0.test.ts @@ -132,7 +132,7 @@ before(async () => { ) // need full path because there are two contracts with name MockV3Aggregator mockV3AggregatorFactory = (await ethers.getContractFactory( - 'src/v0.8/tests/MockV3Aggregator.sol:MockV3Aggregator', + 'src/v0.8/shared/mocks/MockV3Aggregator.sol:MockV3Aggregator', )) as unknown as MockV3AggregatorFactory upkeepMockFactory = await ethers.getContractFactory('UpkeepMock') diff --git a/contracts/test/v0.8/automation/UpkeepTranscoder4_0.test.ts b/contracts/test/v0.8/automation/UpkeepTranscoder4_0.test.ts index 392a1cb5966..b49dfb1d5b4 100644 --- a/contracts/test/v0.8/automation/UpkeepTranscoder4_0.test.ts +++ b/contracts/test/v0.8/automation/UpkeepTranscoder4_0.test.ts @@ -335,7 +335,7 @@ const setup = async () => { linkToken = await linkTokenFactory.connect(owner).deploy() // need full path because there are two contracts with name MockV3Aggregator const mockV3AggregatorFactory = (await ethers.getContractFactory( - 'src/v0.8/tests/MockV3Aggregator.sol:MockV3Aggregator', + 'src/v0.8/shared/mocks/MockV3Aggregator.sol:MockV3Aggregator', )) as unknown as MockV3AggregatorFactory gasPriceFeed = await mockV3AggregatorFactory.connect(owner).deploy(0, gasWei) diff --git a/contracts/test/v0.8/automation/ZKSyncAutomationRegistry2_3.test.ts b/contracts/test/v0.8/automation/ZKSyncAutomationRegistry2_3.test.ts index 95210cf6444..ffbde4464b9 100644 --- a/contracts/test/v0.8/automation/ZKSyncAutomationRegistry2_3.test.ts +++ b/contracts/test/v0.8/automation/ZKSyncAutomationRegistry2_3.test.ts @@ -416,7 +416,7 @@ describe('ZKSyncAutomationRegistry2_3', () => { ) // need full path because there are two contracts with name MockV3Aggregator mockV3AggregatorFactory = (await ethers.getContractFactory( - 'src/v0.8/tests/MockV3Aggregator.sol:MockV3Aggregator', + 
'src/v0.8/shared/mocks/MockV3Aggregator.sol:MockV3Aggregator', )) as unknown as MockV3AggregatorFactory mockZKSyncSystemContextFactory = await ethers.getContractFactory( 'MockZKSyncSystemContext', diff --git a/contracts/test/v0.8/operatorforwarder/AuthorizedForwarder.test.ts b/contracts/test/v0.8/operatorforwarder/AuthorizedForwarder.test.ts index d4e1918c976..6530a2f3c4e 100644 --- a/contracts/test/v0.8/operatorforwarder/AuthorizedForwarder.test.ts +++ b/contracts/test/v0.8/operatorforwarder/AuthorizedForwarder.test.ts @@ -22,7 +22,7 @@ before(async () => { roles.defaultAccount, ) brokenFactory = await ethers.getContractFactory( - 'src/v0.8/tests/Broken.sol:Broken', + 'src/v0.8/operatorforwarder/test/Broken.sol:Broken', roles.defaultAccount, ) forwarderFactory = await ethers.getContractFactory( diff --git a/core/chains/evm/logpoller/helper_test.go b/core/chains/evm/logpoller/helper_test.go index b8d849d7d83..6a5959c5586 100644 --- a/core/chains/evm/logpoller/helper_test.go +++ b/core/chains/evm/logpoller/helper_test.go @@ -25,7 +25,7 @@ import ( "github.com/smartcontractkit/chainlink/v2/core/chains/evm/headtracker" "github.com/smartcontractkit/chainlink/v2/core/chains/evm/logpoller" evmtypes "github.com/smartcontractkit/chainlink/v2/core/chains/evm/types" - "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/log_emitter" + "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/log_emitter" "github.com/smartcontractkit/chainlink/v2/core/internal/testutils" "github.com/smartcontractkit/chainlink/v2/core/internal/testutils/pgtest" ) diff --git a/core/chains/evm/logpoller/log_poller_internal_test.go b/core/chains/evm/logpoller/log_poller_internal_test.go index 620bbf14f41..757c5d4193c 100644 --- a/core/chains/evm/logpoller/log_poller_internal_test.go +++ b/core/chains/evm/logpoller/log_poller_internal_test.go @@ -32,7 +32,7 @@ import ( evmclimocks "github.com/smartcontractkit/chainlink/v2/core/chains/evm/client/mocks" evmtypes "github.com/smartcontractkit/chainlink/v2/core/chains/evm/types" "github.com/smartcontractkit/chainlink/v2/core/chains/evm/utils" - "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/log_emitter" + "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/log_emitter" "github.com/smartcontractkit/chainlink/v2/core/internal/testutils" "github.com/smartcontractkit/chainlink/v2/core/internal/testutils/pgtest" ) diff --git a/core/chains/evm/logpoller/log_poller_test.go b/core/chains/evm/logpoller/log_poller_test.go index df688cd5e5c..3a1eb7b186f 100644 --- a/core/chains/evm/logpoller/log_poller_test.go +++ b/core/chains/evm/logpoller/log_poller_test.go @@ -38,7 +38,7 @@ import ( evmtypes "github.com/smartcontractkit/chainlink/v2/core/chains/evm/types" "github.com/smartcontractkit/chainlink/v2/core/chains/evm/utils" ubig "github.com/smartcontractkit/chainlink/v2/core/chains/evm/utils/big" - "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/log_emitter" + "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/log_emitter" "github.com/smartcontractkit/chainlink/v2/core/internal/testutils" "github.com/smartcontractkit/chainlink/v2/core/internal/testutils/evmtest" "github.com/smartcontractkit/chainlink/v2/core/internal/testutils/pgtest" diff --git a/core/gethwrappers/abigen_test.go b/core/gethwrappers/abigen_test.go index 5874bf0b57c..21858f67ee4 100644 --- a/core/gethwrappers/abigen_test.go +++ b/core/gethwrappers/abigen_test.go @@ -8,7 +8,7 @@ import ( 
"github.com/ethereum/go-ethereum/ethclient/simulated" "github.com/stretchr/testify/require" - "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/log_emitter" + "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/log_emitter" "github.com/smartcontractkit/chainlink/v2/core/internal/testutils" ) diff --git a/core/gethwrappers/generated/type_and_version_interface_wrapper/type_and_version_interface_wrapper.go b/core/gethwrappers/generated/type_and_version_interface_wrapper/type_and_version_interface_wrapper.go deleted file mode 100644 index bf907b0354b..00000000000 --- a/core/gethwrappers/generated/type_and_version_interface_wrapper/type_and_version_interface_wrapper.go +++ /dev/null @@ -1,183 +0,0 @@ -// Code generated - DO NOT EDIT. -// This file is a generated binding and any manual changes will be lost. - -package type_and_version_interface_wrapper - -import ( - "errors" - "math/big" - "strings" - - ethereum "github.com/ethereum/go-ethereum" - "github.com/ethereum/go-ethereum/accounts/abi" - "github.com/ethereum/go-ethereum/accounts/abi/bind" - "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - "github.com/ethereum/go-ethereum/event" -) - -var ( - _ = errors.New - _ = big.NewInt - _ = strings.NewReader - _ = ethereum.NotFound - _ = bind.Bind - _ = common.Big1 - _ = types.BloomLookup - _ = event.NewSubscription - _ = abi.ConvertType -) - -var TypeAndVersionInterfaceMetaData = &bind.MetaData{ - ABI: "[{\"inputs\":[],\"name\":\"typeAndVersion\",\"outputs\":[{\"internalType\":\"string\",\"name\":\"\",\"type\":\"string\"}],\"stateMutability\":\"pure\",\"type\":\"function\"}]", -} - -var TypeAndVersionInterfaceABI = TypeAndVersionInterfaceMetaData.ABI - -type TypeAndVersionInterface struct { - address common.Address - abi abi.ABI - TypeAndVersionInterfaceCaller - TypeAndVersionInterfaceTransactor - TypeAndVersionInterfaceFilterer -} - -type TypeAndVersionInterfaceCaller struct { - contract *bind.BoundContract -} - -type TypeAndVersionInterfaceTransactor struct { - contract *bind.BoundContract -} - -type TypeAndVersionInterfaceFilterer struct { - contract *bind.BoundContract -} - -type TypeAndVersionInterfaceSession struct { - Contract *TypeAndVersionInterface - CallOpts bind.CallOpts - TransactOpts bind.TransactOpts -} - -type TypeAndVersionInterfaceCallerSession struct { - Contract *TypeAndVersionInterfaceCaller - CallOpts bind.CallOpts -} - -type TypeAndVersionInterfaceTransactorSession struct { - Contract *TypeAndVersionInterfaceTransactor - TransactOpts bind.TransactOpts -} - -type TypeAndVersionInterfaceRaw struct { - Contract *TypeAndVersionInterface -} - -type TypeAndVersionInterfaceCallerRaw struct { - Contract *TypeAndVersionInterfaceCaller -} - -type TypeAndVersionInterfaceTransactorRaw struct { - Contract *TypeAndVersionInterfaceTransactor -} - -func NewTypeAndVersionInterface(address common.Address, backend bind.ContractBackend) (*TypeAndVersionInterface, error) { - abi, err := abi.JSON(strings.NewReader(TypeAndVersionInterfaceABI)) - if err != nil { - return nil, err - } - contract, err := bindTypeAndVersionInterface(address, backend, backend, backend) - if err != nil { - return nil, err - } - return &TypeAndVersionInterface{address: address, abi: abi, TypeAndVersionInterfaceCaller: TypeAndVersionInterfaceCaller{contract: contract}, TypeAndVersionInterfaceTransactor: TypeAndVersionInterfaceTransactor{contract: contract}, TypeAndVersionInterfaceFilterer: TypeAndVersionInterfaceFilterer{contract: 
contract}}, nil -} - -func NewTypeAndVersionInterfaceCaller(address common.Address, caller bind.ContractCaller) (*TypeAndVersionInterfaceCaller, error) { - contract, err := bindTypeAndVersionInterface(address, caller, nil, nil) - if err != nil { - return nil, err - } - return &TypeAndVersionInterfaceCaller{contract: contract}, nil -} - -func NewTypeAndVersionInterfaceTransactor(address common.Address, transactor bind.ContractTransactor) (*TypeAndVersionInterfaceTransactor, error) { - contract, err := bindTypeAndVersionInterface(address, nil, transactor, nil) - if err != nil { - return nil, err - } - return &TypeAndVersionInterfaceTransactor{contract: contract}, nil -} - -func NewTypeAndVersionInterfaceFilterer(address common.Address, filterer bind.ContractFilterer) (*TypeAndVersionInterfaceFilterer, error) { - contract, err := bindTypeAndVersionInterface(address, nil, nil, filterer) - if err != nil { - return nil, err - } - return &TypeAndVersionInterfaceFilterer{contract: contract}, nil -} - -func bindTypeAndVersionInterface(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) { - parsed, err := TypeAndVersionInterfaceMetaData.GetAbi() - if err != nil { - return nil, err - } - return bind.NewBoundContract(address, *parsed, caller, transactor, filterer), nil -} - -func (_TypeAndVersionInterface *TypeAndVersionInterfaceRaw) Call(opts *bind.CallOpts, result *[]interface{}, method string, params ...interface{}) error { - return _TypeAndVersionInterface.Contract.TypeAndVersionInterfaceCaller.contract.Call(opts, result, method, params...) -} - -func (_TypeAndVersionInterface *TypeAndVersionInterfaceRaw) Transfer(opts *bind.TransactOpts) (*types.Transaction, error) { - return _TypeAndVersionInterface.Contract.TypeAndVersionInterfaceTransactor.contract.Transfer(opts) -} - -func (_TypeAndVersionInterface *TypeAndVersionInterfaceRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) { - return _TypeAndVersionInterface.Contract.TypeAndVersionInterfaceTransactor.contract.Transact(opts, method, params...) -} - -func (_TypeAndVersionInterface *TypeAndVersionInterfaceCallerRaw) Call(opts *bind.CallOpts, result *[]interface{}, method string, params ...interface{}) error { - return _TypeAndVersionInterface.Contract.contract.Call(opts, result, method, params...) -} - -func (_TypeAndVersionInterface *TypeAndVersionInterfaceTransactorRaw) Transfer(opts *bind.TransactOpts) (*types.Transaction, error) { - return _TypeAndVersionInterface.Contract.contract.Transfer(opts) -} - -func (_TypeAndVersionInterface *TypeAndVersionInterfaceTransactorRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) { - return _TypeAndVersionInterface.Contract.contract.Transact(opts, method, params...) 
-} - -func (_TypeAndVersionInterface *TypeAndVersionInterfaceCaller) TypeAndVersion(opts *bind.CallOpts) (string, error) { - var out []interface{} - err := _TypeAndVersionInterface.contract.Call(opts, &out, "typeAndVersion") - - if err != nil { - return *new(string), err - } - - out0 := *abi.ConvertType(out[0], new(string)).(*string) - - return out0, err - -} - -func (_TypeAndVersionInterface *TypeAndVersionInterfaceSession) TypeAndVersion() (string, error) { - return _TypeAndVersionInterface.Contract.TypeAndVersion(&_TypeAndVersionInterface.CallOpts) -} - -func (_TypeAndVersionInterface *TypeAndVersionInterfaceCallerSession) TypeAndVersion() (string, error) { - return _TypeAndVersionInterface.Contract.TypeAndVersion(&_TypeAndVersionInterface.CallOpts) -} - -func (_TypeAndVersionInterface *TypeAndVersionInterface) Address() common.Address { - return _TypeAndVersionInterface.address -} - -type TypeAndVersionInterfaceInterface interface { - TypeAndVersion(opts *bind.CallOpts) (string, error) - - Address() common.Address -} diff --git a/core/gethwrappers/generation/generated-wrapper-dependency-versions-do-not-edit.txt b/core/gethwrappers/generation/generated-wrapper-dependency-versions-do-not-edit.txt index 20b5bfbdbad..b10ad89f930 100644 --- a/core/gethwrappers/generation/generated-wrapper-dependency-versions-do-not-edit.txt +++ b/core/gethwrappers/generation/generated-wrapper-dependency-versions-do-not-edit.txt @@ -49,7 +49,6 @@ keeper_registry_wrapper1_3: ../../contracts/solc/v0.8.6/KeeperRegistry1_3/Keeper keeper_registry_wrapper2_0: ../../contracts/solc/v0.8.6/KeeperRegistry2_0/KeeperRegistry2_0.abi ../../contracts/solc/v0.8.6/KeeperRegistry2_0/KeeperRegistry2_0.bin c32dea7d5ef66b7c58ddc84ddf69aa44df1b3ae8601fbc271c95be4ff5853056 keeper_registry_wrapper_2_1: ../../contracts/solc/v0.8.16/KeeperRegistry2_1/KeeperRegistry2_1.abi ../../contracts/solc/v0.8.16/KeeperRegistry2_1/KeeperRegistry2_1.bin 11d36cb9eab0e136a2c3224709f7df17711756a126127e8c82326ce0a2e2b4f4 keepers_vrf_consumer: ../../contracts/solc/v0.8.6/KeepersVRFConsumer/KeepersVRFConsumer.abi ../../contracts/solc/v0.8.6/KeepersVRFConsumer/KeepersVRFConsumer.bin fa75572e689c9e84705c63e8dbe1b7b8aa1a8fe82d66356c4873d024bb9166e8 -log_emitter: ../../contracts/solc/v0.8.19/LogEmitter/LogEmitter.abi ../../contracts/solc/v0.8.19/LogEmitter/LogEmitter.bin 4b129ab93432c95ff9143f0631323e189887668889e0b36ccccf18a571e41ccf log_triggered_streams_lookup_wrapper: ../../contracts/solc/v0.8.16/LogTriggeredStreamsLookup/LogTriggeredStreamsLookup.abi ../../contracts/solc/v0.8.16/LogTriggeredStreamsLookup/LogTriggeredStreamsLookup.bin 920fff3b662909f12ed11b47d168036ffa74ad52070a94e2fa26cdad5e428b4e log_upkeep_counter_wrapper: ../../contracts/solc/v0.8.6/LogUpkeepCounter/LogUpkeepCounter.abi ../../contracts/solc/v0.8.6/LogUpkeepCounter/LogUpkeepCounter.bin 5482033d55eddb653bf580de0cc950db89a329091e085ac4122583df4a9777cd mock_aggregator_proxy: ../../contracts/solc/v0.8.6/MockAggregatorProxy/MockAggregatorProxy.abi ../../contracts/solc/v0.8.6/MockAggregatorProxy/MockAggregatorProxy.bin b16c108f3dd384c342ddff5e94da7c0a8d39d1be5e3d8f2cf61ecc7f0e50ff42 @@ -67,7 +66,6 @@ solidity_vrf_v08_verifier_wrapper: ../../contracts/solc/v0.8.6/VRFTestHelper/VRF streams_lookup_compatible_interface: ../../contracts/solc/v0.8.16/StreamsLookupCompatibleInterface/StreamsLookupCompatibleInterface.abi ../../contracts/solc/v0.8.16/StreamsLookupCompatibleInterface/StreamsLookupCompatibleInterface.bin 2861f553fb4731e89126b13319462df674727005a51982d1e617e2c2e44fa422 
streams_lookup_upkeep_wrapper: ../../contracts/solc/v0.8.16/StreamsLookupUpkeep/StreamsLookupUpkeep.abi ../../contracts/solc/v0.8.16/StreamsLookupUpkeep/StreamsLookupUpkeep.bin 37e3a61091cc2a156539dd4aaff987e07577118aa02e97931a647df55705465e trusted_blockhash_store: ../../contracts/solc/v0.8.19/TrustedBlockhashStore/TrustedBlockhashStore.abi ../../contracts/solc/v0.8.19/TrustedBlockhashStore/TrustedBlockhashStore.bin 1570663ef6feabf8660a93e85d2427ad8e7dabcfa5b418d308c62132451c5662 -type_and_version_interface_wrapper: ../../contracts/solc/v0.8.6/KeeperRegistry1_2/TypeAndVersionInterface.abi ../../contracts/solc/v0.8.6/KeeperRegistry1_2/TypeAndVersionInterface.bin bc9c3a6e73e3ebd5b58754df0deeb3b33f4bb404d5709bb904aed51d32f4b45e upkeep_counter_wrapper: ../../contracts/solc/v0.8.16/UpkeepCounter/UpkeepCounter.abi ../../contracts/solc/v0.8.16/UpkeepCounter/UpkeepCounter.bin cef953186d12ac802e54d17c897d01605b60bbe0ce2df3b4cf2c31c5c3168b35 upkeep_perform_counter_restrictive_wrapper: ../../contracts/solc/v0.8.16/UpkeepPerformCounterRestrictive/UpkeepPerformCounterRestrictive.abi ../../contracts/solc/v0.8.16/UpkeepPerformCounterRestrictive/UpkeepPerformCounterRestrictive.bin 20955b21acceb58355fa287b29194a73edf5937067ba7140667301017cb2b24c upkeep_transcoder: ../../contracts/solc/v0.8.6/UpkeepTranscoder/UpkeepTranscoder.abi ../../contracts/solc/v0.8.6/UpkeepTranscoder/UpkeepTranscoder.bin 336c92a981597be26508455f81a908a0784a817b129a59686c5b2c4afcba730a @@ -90,7 +88,6 @@ vrf_external_sub_owner_example: ../../contracts/solc/v0.8.6/VRFExternalSubOwnerE vrf_load_test_external_sub_owner: ../../contracts/solc/v0.8.6/VRFLoadTestExternalSubOwner/VRFLoadTestExternalSubOwner.abi ../../contracts/solc/v0.8.6/VRFLoadTestExternalSubOwner/VRFLoadTestExternalSubOwner.bin 2097faa70265e420036cc8a3efb1f1e0836ad2d7323b295b9a26a125dbbe6c7d vrf_load_test_ownerless_consumer: ../../contracts/solc/v0.8.6/VRFLoadTestOwnerlessConsumer/VRFLoadTestOwnerlessConsumer.abi ../../contracts/solc/v0.8.6/VRFLoadTestOwnerlessConsumer/VRFLoadTestOwnerlessConsumer.bin 74f914843cbc70b9c3079c3e1c709382ce415225e8bb40113e7ac018bfcb0f5c vrf_load_test_with_metrics: ../../contracts/solc/v0.8.6/VRFV2LoadTestWithMetrics/VRFV2LoadTestWithMetrics.abi ../../contracts/solc/v0.8.6/VRFV2LoadTestWithMetrics/VRFV2LoadTestWithMetrics.bin c9621c52d216a090ff6bbe942f1b75d2bce8658a27323c3789e5e14b523277ee -vrf_log_emitter: ../../contracts/solc/v0.8.19/VRFLogEmitter/VRFLogEmitter.abi ../../contracts/solc/v0.8.19/VRFLogEmitter/VRFLogEmitter.bin 15f491d445ac4d0c712d1cbe4e5054c759b080bf20de7d54bfe2a82cde4dcf06 vrf_malicious_consumer_v2: ../../contracts/solc/v0.8.6/VRFMaliciousConsumerV2/VRFMaliciousConsumerV2.abi ../../contracts/solc/v0.8.6/VRFMaliciousConsumerV2/VRFMaliciousConsumerV2.bin 9755fa8ffc7f5f0b337d5d413d77b0c9f6cd6f68c31727d49acdf9d4a51bc522 vrf_malicious_consumer_v2_plus: ../../contracts/solc/v0.8.19/VRFMaliciousConsumerV2Plus/VRFMaliciousConsumerV2Plus.abi ../../contracts/solc/v0.8.19/VRFMaliciousConsumerV2Plus/VRFMaliciousConsumerV2Plus.bin f6bf81658d3472bb705d28dc4a837097ec93d78c3f786efaa9cd040ada9d3319 vrf_mock_ethlink_aggregator: ../../contracts/solc/v0.8.6/VRFMockETHLINKAggregator/VRFMockETHLINKAggregator.abi ../../contracts/solc/v0.8.6/VRFMockETHLINKAggregator/VRFMockETHLINKAggregator.bin 3657f8c552147eb55d7538fa7d8012c1a983d8c5184610de60600834a72e006b diff --git a/core/gethwrappers/go_generate.go b/core/gethwrappers/go_generate.go index 1fee016fe8b..ab610f01d67 100644 --- a/core/gethwrappers/go_generate.go +++ b/core/gethwrappers/go_generate.go 
@@ -17,7 +17,6 @@ package gethwrappers //go:generate go run ./generation/generate/wrap.go ../../contracts/solc/v0.8.6/KeeperRegistrar1_2/KeeperRegistrar.abi ../../contracts/solc/v0.8.6/KeeperRegistrar1_2/KeeperRegistrar.bin KeeperRegistrar keeper_registrar_wrapper1_2 //go:generate go run ./generation/generate/wrap.go ../../contracts/solc/v0.8.6/KeeperRegistrar1_2Mock/KeeperRegistrar1_2Mock.abi ../../contracts/solc/v0.8.6/KeeperRegistrar1_2Mock/KeeperRegistrar1_2Mock.bin KeeperRegistrarMock keeper_registrar_wrapper1_2_mock //go:generate go run ./generation/generate/wrap.go ../../contracts/solc/v0.8.6/KeeperRegistry1_2/KeeperRegistry1_2.abi ../../contracts/solc/v0.8.6/KeeperRegistry1_2/KeeperRegistry1_2.bin KeeperRegistry keeper_registry_wrapper1_2 -//go:generate go run ./generation/generate/wrap.go ../../contracts/solc/v0.8.6/KeeperRegistry1_2/TypeAndVersionInterface.abi ../../contracts/solc/v0.8.6/KeeperRegistry1_2/TypeAndVersionInterface.bin TypeAndVersionInterface type_and_version_interface_wrapper //go:generate go run ./generation/generate/wrap.go ../../contracts/solc/v0.8.6/KeeperRegistryCheckUpkeepGasUsageWrapper1_2/KeeperRegistryCheckUpkeepGasUsageWrapper1_2.abi ../../contracts/solc/v0.8.6/KeeperRegistryCheckUpkeepGasUsageWrapper1_2/KeeperRegistryCheckUpkeepGasUsageWrapper1_2.bin KeeperRegistryCheckUpkeepGasUsageWrapper gas_wrapper //go:generate go run ./generation/generate/wrap.go ../../contracts/solc/v0.8.6/KeeperRegistryCheckUpkeepGasUsageWrapper1_2Mock/KeeperRegistryCheckUpkeepGasUsageWrapper1_2Mock.abi ../../contracts/solc/v0.8.6/KeeperRegistryCheckUpkeepGasUsageWrapper1_2Mock/KeeperRegistryCheckUpkeepGasUsageWrapper1_2Mock.bin KeeperRegistryCheckUpkeepGasUsageWrapperMock gas_wrapper_mock //go:generate go run ./generation/generate/wrap.go ../../contracts/solc/v0.8.6/KeeperRegistry1_3/KeeperRegistry1_3.abi ../../contracts/solc/v0.8.6/KeeperRegistry1_3/KeeperRegistry1_3.bin KeeperRegistry keeper_registry_wrapper1_3 diff --git a/core/gethwrappers/go_generate_logpoller.go b/core/gethwrappers/go_generate_logpoller.go deleted file mode 100644 index b28b8205830..00000000000 --- a/core/gethwrappers/go_generate_logpoller.go +++ /dev/null @@ -1,7 +0,0 @@ -// Package gethwrappers provides tools for wrapping solidity contracts with -// golang packages, using abigen. 
-package gethwrappers - -// Log tester -//go:generate go run ./generation/generate/wrap.go ../../contracts/solc/v0.8.19/LogEmitter/LogEmitter.abi ../../contracts/solc/v0.8.19/LogEmitter/LogEmitter.bin LogEmitter log_emitter -//go:generate go run ./generation/generate/wrap.go ../../contracts/solc/v0.8.19/VRFLogEmitter/VRFLogEmitter.abi ../../contracts/solc/v0.8.19/VRFLogEmitter/VRFLogEmitter.bin VRFLogEmitter vrf_log_emitter diff --git a/core/gethwrappers/generated/log_emitter/log_emitter.go b/core/gethwrappers/shared/generated/log_emitter/log_emitter.go similarity index 93% rename from core/gethwrappers/generated/log_emitter/log_emitter.go rename to core/gethwrappers/shared/generated/log_emitter/log_emitter.go index 24fef257af3..6ae06d7f08d 100644 --- a/core/gethwrappers/generated/log_emitter/log_emitter.go +++ b/core/gethwrappers/shared/generated/log_emitter/log_emitter.go @@ -31,7 +31,7 @@ var ( ) var LogEmitterMetaData = &bind.MetaData{ - ABI: "[{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"name\":\"Log1\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"name\":\"Log2\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":false,\"internalType\":\"string\",\"name\":\"\",\"type\":\"string\"}],\"name\":\"Log3\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"},{\"indexed\":true,\"internalType\":\"uint256\",\"name\":\"\",\"type\":\"uint256\"}],\"name\":\"Log4\",\"type\":\"event\"},{\"inputs\":[{\"internalType\":\"uint256[]\",\"name\":\"v\",\"type\":\"uint256[]\"}],\"name\":\"EmitLog1\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256[]\",\"name\":\"v\",\"type\":\"uint256[]\"}],\"name\":\"EmitLog2\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"string[]\",\"name\":\"v\",\"type\":\"string[]\"}],\"name\":\"EmitLog3\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"v\",\"type\":\"uint256\"},{\"internalType\":\"uint256\",\"name\":\"w\",\"type\":\"uint256\"},{\"internalType\":\"uint256\",\"name\":\"c\",\"type\":\"uint256\"}],\"name\":\"EmitLog4\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"}]", + ABI: 
"[{\"type\":\"function\",\"name\":\"EmitLog1\",\"inputs\":[{\"name\":\"v\",\"type\":\"uint256[]\",\"internalType\":\"uint256[]\"}],\"outputs\":[],\"stateMutability\":\"nonpayable\"},{\"type\":\"function\",\"name\":\"EmitLog2\",\"inputs\":[{\"name\":\"v\",\"type\":\"uint256[]\",\"internalType\":\"uint256[]\"}],\"outputs\":[],\"stateMutability\":\"nonpayable\"},{\"type\":\"function\",\"name\":\"EmitLog3\",\"inputs\":[{\"name\":\"v\",\"type\":\"string[]\",\"internalType\":\"string[]\"}],\"outputs\":[],\"stateMutability\":\"nonpayable\"},{\"type\":\"function\",\"name\":\"EmitLog4\",\"inputs\":[{\"name\":\"v\",\"type\":\"uint256\",\"internalType\":\"uint256\"},{\"name\":\"w\",\"type\":\"uint256\",\"internalType\":\"uint256\"},{\"name\":\"c\",\"type\":\"uint256\",\"internalType\":\"uint256\"}],\"outputs\":[],\"stateMutability\":\"nonpayable\"},{\"type\":\"event\",\"name\":\"Log1\",\"inputs\":[{\"name\":\"\",\"type\":\"uint256\",\"indexed\":false,\"internalType\":\"uint256\"}],\"anonymous\":false},{\"type\":\"event\",\"name\":\"Log2\",\"inputs\":[{\"name\":\"\",\"type\":\"uint256\",\"indexed\":true,\"internalType\":\"uint256\"}],\"anonymous\":false},{\"type\":\"event\",\"name\":\"Log3\",\"inputs\":[{\"name\":\"\",\"type\":\"string\",\"indexed\":false,\"internalType\":\"string\"}],\"anonymous\":false},{\"type\":\"event\",\"name\":\"Log4\",\"inputs\":[{\"name\":\"\",\"type\":\"uint256\",\"indexed\":true,\"internalType\":\"uint256\"},{\"name\":\"\",\"type\":\"uint256\",\"indexed\":true,\"internalType\":\"uint256\"}],\"anonymous\":false}]", Bin: "0x608060405234801561001057600080fd5b506105c5806100206000396000f3fe608060405234801561001057600080fd5b506004361061004c5760003560e01c8063696933c914610051578063b4b12d9814610066578063bc253bc014610079578063d9c21f461461008c575b600080fd5b61006461005f3660046102d7565b61009f565b005b61006461007436600461036d565b610113565b6100646100873660046102d7565b610163565b61006461009a366004610399565b6101c7565b60005b815181101561010f577f46692c0e59ca9cd1ad8f984a9d11715ec83424398b7eed4e05c8ce84662415a88282815181106100de576100de6104be565b60200260200101516040516100f591815260200190565b60405180910390a180610107816104ed565b9150506100a2565b5050565b60005b8181101561015d57604051839085907fba21d5b63d64546cb4ab29e370a8972bf26f78cb0c395391b4f451699fdfdc5d90600090a380610155816104ed565b915050610116565b50505050565b60005b815181101561010f57818181518110610181576101816104be565b60200260200101517f624fb00c2ce79f34cb543884c3af64816dce0f4cec3d32661959e49d488a7a9360405160405180910390a2806101bf816104ed565b915050610166565b60005b815181101561010f577fb94ec34dfe32a8a7170992a093976368d1e63decf8f0bc0b38a8eb89cc9f95cf828281518110610206576102066104be565b602002602001015160405161021b919061054c565b60405180910390a18061022d816104ed565b9150506101ca565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b604051601f82017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe016810167ffffffffffffffff811182821017156102ab576102ab610235565b604052919050565b600067ffffffffffffffff8211156102cd576102cd610235565b5060051b60200190565b600060208083850312156102ea57600080fd5b823567ffffffffffffffff81111561030157600080fd5b8301601f8101851361031257600080fd5b8035610325610320826102b3565b610264565b81815260059190911b8201830190838101908783111561034457600080fd5b928401925b8284101561036257833582529284019290840190610349565b979650505050505050565b60008060006060848603121561038257600080fd5b505081359360208301359350604090920135919050565b600060208083850312156103ac57600080fd5b823567ffffffffffffffff808211156103c457600080f
d5b8185019150601f86818401126103d957600080fd5b82356103e7610320826102b3565b81815260059190911b8401850190858101908983111561040657600080fd5b8686015b838110156104b0578035868111156104225760008081fd5b8701603f81018c136104345760008081fd5b8881013560408882111561044a5761044a610235565b6104798b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe08a85011601610264565b8281528e8284860101111561048e5760008081fd5b828285018d83013760009281018c01929092525084525091870191870161040a565b509998505050505050505050565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052603260045260246000fd5b60007fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8203610545577f4e487b7100000000000000000000000000000000000000000000000000000000600052601160045260246000fd5b5060010190565b600060208083528351808285015260005b818110156105795785810183015185820160400152820161055d565b5060006040828601015260407fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0601f830116850101925050509291505056fea164736f6c6343000813000a", } diff --git a/core/gethwrappers/shared/generated/type_and_version/type_and_version.go b/core/gethwrappers/shared/generated/type_and_version/type_and_version.go new file mode 100644 index 00000000000..a4a518d9ea2 --- /dev/null +++ b/core/gethwrappers/shared/generated/type_and_version/type_and_version.go @@ -0,0 +1,183 @@ +// Code generated - DO NOT EDIT. +// This file is a generated binding and any manual changes will be lost. + +package type_and_version + +import ( + "errors" + "math/big" + "strings" + + ethereum "github.com/ethereum/go-ethereum" + "github.com/ethereum/go-ethereum/accounts/abi" + "github.com/ethereum/go-ethereum/accounts/abi/bind" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" + "github.com/ethereum/go-ethereum/event" +) + +var ( + _ = errors.New + _ = big.NewInt + _ = strings.NewReader + _ = ethereum.NotFound + _ = bind.Bind + _ = common.Big1 + _ = types.BloomLookup + _ = event.NewSubscription + _ = abi.ConvertType +) + +var ITypeAndVersionMetaData = &bind.MetaData{ + ABI: "[{\"type\":\"function\",\"name\":\"typeAndVersion\",\"inputs\":[],\"outputs\":[{\"name\":\"\",\"type\":\"string\",\"internalType\":\"string\"}],\"stateMutability\":\"pure\"}]", +} + +var ITypeAndVersionABI = ITypeAndVersionMetaData.ABI + +type ITypeAndVersion struct { + address common.Address + abi abi.ABI + ITypeAndVersionCaller + ITypeAndVersionTransactor + ITypeAndVersionFilterer +} + +type ITypeAndVersionCaller struct { + contract *bind.BoundContract +} + +type ITypeAndVersionTransactor struct { + contract *bind.BoundContract +} + +type ITypeAndVersionFilterer struct { + contract *bind.BoundContract +} + +type ITypeAndVersionSession struct { + Contract *ITypeAndVersion + CallOpts bind.CallOpts + TransactOpts bind.TransactOpts +} + +type ITypeAndVersionCallerSession struct { + Contract *ITypeAndVersionCaller + CallOpts bind.CallOpts +} + +type ITypeAndVersionTransactorSession struct { + Contract *ITypeAndVersionTransactor + TransactOpts bind.TransactOpts +} + +type ITypeAndVersionRaw struct { + Contract *ITypeAndVersion +} + +type ITypeAndVersionCallerRaw struct { + Contract *ITypeAndVersionCaller +} + +type ITypeAndVersionTransactorRaw struct { + Contract *ITypeAndVersionTransactor +} + +func NewITypeAndVersion(address common.Address, backend bind.ContractBackend) (*ITypeAndVersion, error) { + abi, err := abi.JSON(strings.NewReader(ITypeAndVersionABI)) + if err != nil { + return nil, err + } + contract, err := bindITypeAndVersion(address, 
backend, backend, backend) + if err != nil { + return nil, err + } + return &ITypeAndVersion{address: address, abi: abi, ITypeAndVersionCaller: ITypeAndVersionCaller{contract: contract}, ITypeAndVersionTransactor: ITypeAndVersionTransactor{contract: contract}, ITypeAndVersionFilterer: ITypeAndVersionFilterer{contract: contract}}, nil +} + +func NewITypeAndVersionCaller(address common.Address, caller bind.ContractCaller) (*ITypeAndVersionCaller, error) { + contract, err := bindITypeAndVersion(address, caller, nil, nil) + if err != nil { + return nil, err + } + return &ITypeAndVersionCaller{contract: contract}, nil +} + +func NewITypeAndVersionTransactor(address common.Address, transactor bind.ContractTransactor) (*ITypeAndVersionTransactor, error) { + contract, err := bindITypeAndVersion(address, nil, transactor, nil) + if err != nil { + return nil, err + } + return &ITypeAndVersionTransactor{contract: contract}, nil +} + +func NewITypeAndVersionFilterer(address common.Address, filterer bind.ContractFilterer) (*ITypeAndVersionFilterer, error) { + contract, err := bindITypeAndVersion(address, nil, nil, filterer) + if err != nil { + return nil, err + } + return &ITypeAndVersionFilterer{contract: contract}, nil +} + +func bindITypeAndVersion(address common.Address, caller bind.ContractCaller, transactor bind.ContractTransactor, filterer bind.ContractFilterer) (*bind.BoundContract, error) { + parsed, err := ITypeAndVersionMetaData.GetAbi() + if err != nil { + return nil, err + } + return bind.NewBoundContract(address, *parsed, caller, transactor, filterer), nil +} + +func (_ITypeAndVersion *ITypeAndVersionRaw) Call(opts *bind.CallOpts, result *[]interface{}, method string, params ...interface{}) error { + return _ITypeAndVersion.Contract.ITypeAndVersionCaller.contract.Call(opts, result, method, params...) +} + +func (_ITypeAndVersion *ITypeAndVersionRaw) Transfer(opts *bind.TransactOpts) (*types.Transaction, error) { + return _ITypeAndVersion.Contract.ITypeAndVersionTransactor.contract.Transfer(opts) +} + +func (_ITypeAndVersion *ITypeAndVersionRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) { + return _ITypeAndVersion.Contract.ITypeAndVersionTransactor.contract.Transact(opts, method, params...) +} + +func (_ITypeAndVersion *ITypeAndVersionCallerRaw) Call(opts *bind.CallOpts, result *[]interface{}, method string, params ...interface{}) error { + return _ITypeAndVersion.Contract.contract.Call(opts, result, method, params...) +} + +func (_ITypeAndVersion *ITypeAndVersionTransactorRaw) Transfer(opts *bind.TransactOpts) (*types.Transaction, error) { + return _ITypeAndVersion.Contract.contract.Transfer(opts) +} + +func (_ITypeAndVersion *ITypeAndVersionTransactorRaw) Transact(opts *bind.TransactOpts, method string, params ...interface{}) (*types.Transaction, error) { + return _ITypeAndVersion.Contract.contract.Transact(opts, method, params...) 
+} + +func (_ITypeAndVersion *ITypeAndVersionCaller) TypeAndVersion(opts *bind.CallOpts) (string, error) { + var out []interface{} + err := _ITypeAndVersion.contract.Call(opts, &out, "typeAndVersion") + + if err != nil { + return *new(string), err + } + + out0 := *abi.ConvertType(out[0], new(string)).(*string) + + return out0, err + +} + +func (_ITypeAndVersion *ITypeAndVersionSession) TypeAndVersion() (string, error) { + return _ITypeAndVersion.Contract.TypeAndVersion(&_ITypeAndVersion.CallOpts) +} + +func (_ITypeAndVersion *ITypeAndVersionCallerSession) TypeAndVersion() (string, error) { + return _ITypeAndVersion.Contract.TypeAndVersion(&_ITypeAndVersion.CallOpts) +} + +func (_ITypeAndVersion *ITypeAndVersion) Address() common.Address { + return _ITypeAndVersion.address +} + +type ITypeAndVersionInterface interface { + TypeAndVersion(opts *bind.CallOpts) (string, error) + + Address() common.Address +} diff --git a/core/gethwrappers/generated/vrf_log_emitter/vrf_log_emitter.go b/core/gethwrappers/shared/generated/vrf_log_emitter/vrf_log_emitter.go similarity index 88% rename from core/gethwrappers/generated/vrf_log_emitter/vrf_log_emitter.go rename to core/gethwrappers/shared/generated/vrf_log_emitter/vrf_log_emitter.go index 2cdeaa6c3a8..db6fae033a8 100644 --- a/core/gethwrappers/generated/vrf_log_emitter/vrf_log_emitter.go +++ b/core/gethwrappers/shared/generated/vrf_log_emitter/vrf_log_emitter.go @@ -31,7 +31,7 @@ var ( ) var VRFLogEmitterMetaData = &bind.MetaData{ - ABI: "[{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"uint256\",\"name\":\"requestId\",\"type\":\"uint256\"},{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"outputSeed\",\"type\":\"uint256\"},{\"indexed\":false,\"internalType\":\"uint96\",\"name\":\"payment\",\"type\":\"uint96\"},{\"indexed\":false,\"internalType\":\"bool\",\"name\":\"success\",\"type\":\"bool\"}],\"name\":\"RandomWordsFulfilled\",\"type\":\"event\"},{\"anonymous\":false,\"inputs\":[{\"indexed\":true,\"internalType\":\"bytes32\",\"name\":\"keyHash\",\"type\":\"bytes32\"},{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"requestId\",\"type\":\"uint256\"},{\"indexed\":false,\"internalType\":\"uint256\",\"name\":\"preSeed\",\"type\":\"uint256\"},{\"indexed\":true,\"internalType\":\"uint64\",\"name\":\"subId\",\"type\":\"uint64\"},{\"indexed\":false,\"internalType\":\"uint16\",\"name\":\"minimumRequestConfirmations\",\"type\":\"uint16\"},{\"indexed\":false,\"internalType\":\"uint32\",\"name\":\"callbackGasLimit\",\"type\":\"uint32\"},{\"indexed\":false,\"internalType\":\"uint32\",\"name\":\"numWords\",\"type\":\"uint32\"},{\"indexed\":true,\"internalType\":\"address\",\"name\":\"sender\",\"type\":\"address\"}],\"name\":\"RandomWordsRequested\",\"type\":\"event\"},{\"inputs\":[{\"internalType\":\"uint256\",\"name\":\"requestId\",\"type\":\"uint256\"},{\"internalType\":\"uint256\",\"name\":\"outputSeed\",\"type\":\"uint256\"},{\"internalType\":\"uint96\",\"name\":\"payment\",\"type\":\"uint96\"},{\"internalType\":\"bool\",\"name\":\"success\",\"type\":\"bool\"}],\"name\":\"emitRandomWordsFulfilled\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"keyHash\",\"type\":\"bytes32\"},{\"internalType\":\"uint256\",\"name\":\"requestId\",\"type\":\"uint256\"},{\"internalType\":\"uint256\",\"name\":\"preSeed\",\"type\":\"uint256\"},{\"internalType\":\"uint64\",\"name\":\"subId\",\"type\":\"uint64\"},{\"internalType\":\"uint16\",\"name\":\"minimumReques
tConfirmations\",\"type\":\"uint16\"},{\"internalType\":\"uint32\",\"name\":\"callbackGasLimit\",\"type\":\"uint32\"},{\"internalType\":\"uint32\",\"name\":\"numWords\",\"type\":\"uint32\"},{\"internalType\":\"address\",\"name\":\"sender\",\"type\":\"address\"}],\"name\":\"emitRandomWordsRequested\",\"outputs\":[],\"stateMutability\":\"nonpayable\",\"type\":\"function\"}]", + ABI: "[{\"type\":\"function\",\"name\":\"emitRandomWordsFulfilled\",\"inputs\":[{\"name\":\"requestId\",\"type\":\"uint256\",\"internalType\":\"uint256\"},{\"name\":\"outputSeed\",\"type\":\"uint256\",\"internalType\":\"uint256\"},{\"name\":\"payment\",\"type\":\"uint96\",\"internalType\":\"uint96\"},{\"name\":\"success\",\"type\":\"bool\",\"internalType\":\"bool\"}],\"outputs\":[],\"stateMutability\":\"nonpayable\"},{\"type\":\"function\",\"name\":\"emitRandomWordsRequested\",\"inputs\":[{\"name\":\"keyHash\",\"type\":\"bytes32\",\"internalType\":\"bytes32\"},{\"name\":\"requestId\",\"type\":\"uint256\",\"internalType\":\"uint256\"},{\"name\":\"preSeed\",\"type\":\"uint256\",\"internalType\":\"uint256\"},{\"name\":\"subId\",\"type\":\"uint64\",\"internalType\":\"uint64\"},{\"name\":\"minimumRequestConfirmations\",\"type\":\"uint16\",\"internalType\":\"uint16\"},{\"name\":\"callbackGasLimit\",\"type\":\"uint32\",\"internalType\":\"uint32\"},{\"name\":\"numWords\",\"type\":\"uint32\",\"internalType\":\"uint32\"},{\"name\":\"sender\",\"type\":\"address\",\"internalType\":\"address\"}],\"outputs\":[],\"stateMutability\":\"nonpayable\"},{\"type\":\"event\",\"name\":\"RandomWordsFulfilled\",\"inputs\":[{\"name\":\"requestId\",\"type\":\"uint256\",\"indexed\":true,\"internalType\":\"uint256\"},{\"name\":\"outputSeed\",\"type\":\"uint256\",\"indexed\":false,\"internalType\":\"uint256\"},{\"name\":\"payment\",\"type\":\"uint96\",\"indexed\":false,\"internalType\":\"uint96\"},{\"name\":\"success\",\"type\":\"bool\",\"indexed\":false,\"internalType\":\"bool\"}],\"anonymous\":false},{\"type\":\"event\",\"name\":\"RandomWordsRequested\",\"inputs\":[{\"name\":\"keyHash\",\"type\":\"bytes32\",\"indexed\":true,\"internalType\":\"bytes32\"},{\"name\":\"requestId\",\"type\":\"uint256\",\"indexed\":false,\"internalType\":\"uint256\"},{\"name\":\"preSeed\",\"type\":\"uint256\",\"indexed\":false,\"internalType\":\"uint256\"},{\"name\":\"subId\",\"type\":\"uint64\",\"indexed\":true,\"internalType\":\"uint64\"},{\"name\":\"minimumRequestConfirmations\",\"type\":\"uint16\",\"indexed\":false,\"internalType\":\"uint16\"},{\"name\":\"callbackGasLimit\",\"type\":\"uint32\",\"indexed\":false,\"internalType\":\"uint32\"},{\"name\":\"numWords\",\"type\":\"uint32\",\"indexed\":false,\"internalType\":\"uint32\"},{\"name\":\"sender\",\"type\":\"address\",\"indexed\":true,\"internalType\":\"address\"}],\"anonymous\":false}]", Bin: 
"0x608060405234801561001057600080fd5b5061027f806100206000396000f3fe608060405234801561001057600080fd5b50600436106100365760003560e01c8063ca920adb1461003b578063fe62d3e914610050575b600080fd5b61004e61004936600461015b565b610063565b005b61004e61005e366004610212565b6100eb565b604080518881526020810188905261ffff86168183015263ffffffff858116606083015284166080820152905173ffffffffffffffffffffffffffffffffffffffff83169167ffffffffffffffff8816918b917f63373d1c4696214b898952999c9aaec57dac1ee2723cec59bea6888f489a9772919081900360a00190a45050505050505050565b604080518481526bffffffffffffffffffffffff8416602082015282151581830152905185917f7dffc5ae5ee4e2e4df1651cf6ad329a73cebdb728f37ea0187b9b17e036756e4919081900360600190a250505050565b803563ffffffff8116811461015657600080fd5b919050565b600080600080600080600080610100898b03121561017857600080fd5b883597506020890135965060408901359550606089013567ffffffffffffffff811681146101a557600080fd5b9450608089013561ffff811681146101bc57600080fd5b93506101ca60a08a01610142565b92506101d860c08a01610142565b915060e089013573ffffffffffffffffffffffffffffffffffffffff8116811461020157600080fd5b809150509295985092959890939650565b6000806000806080858703121561022857600080fd5b843593506020850135925060408501356bffffffffffffffffffffffff8116811461025257600080fd5b91506060850135801515811461026757600080fd5b93969295509093505056fea164736f6c6343000813000a", } diff --git a/core/gethwrappers/shared/generation/generated-wrapper-dependency-versions-do-not-edit.txt b/core/gethwrappers/shared/generation/generated-wrapper-dependency-versions-do-not-edit.txt index 1c333b653ef..9b7ba5f8832 100644 --- a/core/gethwrappers/shared/generation/generated-wrapper-dependency-versions-do-not-edit.txt +++ b/core/gethwrappers/shared/generation/generated-wrapper-dependency-versions-do-not-edit.txt @@ -5,6 +5,9 @@ burn_mint_erc677: ../../../contracts/solc/shared/BurnMintERC677/BurnMintERC677.s chain_reader_tester: ../../../contracts/solc/shared/ChainReaderTester/ChainReaderTester.sol/ChainReaderTester.abi.json ../../../contracts/solc/shared/ChainReaderTester/ChainReaderTester.sol/ChainReaderTester.bin 876c55e8d2556dc9cc953c786ae72b0430cb2c992f84573a2aae9680068f293d erc20: ../../../contracts/solc/vendor/ERC20/ERC20.sol/ERC20.abi.json ../../../contracts/solc/vendor/ERC20/ERC20.sol/ERC20.bin 9a5e3f7ec9fea385eeba374d184d6b83784304f537a90f6b81827c732d0b37c4 link_token: ../../../contracts/solc/shared/LinkToken/LinkToken.sol/LinkToken.abi.json ../../../contracts/solc/shared/LinkToken/LinkToken.sol/LinkToken.bin 9d1c648233822b70b03bf4fdb1af4cffaead8f1391dd149a79b3072defbd0c62 -mock_v3_aggregator_contract: ../../../contracts/solc/tests/MockV3Aggregator/MockV3Aggregator.sol/MockV3Aggregator.abi.json ../../../contracts/solc/tests/MockV3Aggregator/MockV3Aggregator.sol/MockV3Aggregator.bin 76796e0faffb2981d49082d94f2f2c9ec87d8ad960b022993d0681f9c81a832d +log_emitter: ../../../contracts/solc/shared/LogEmitter/LogEmitter.sol/LogEmitter.abi.json ../../../contracts/solc/shared/LogEmitter/LogEmitter.sol/LogEmitter.bin f884ed34204f82dcd1ea8f20db1b24d410bf23ab2687d56968d2c670e98277dd +mock_v3_aggregator_contract: ../../../contracts/solc/shared/MockV3Aggregator/MockV3Aggregator.sol/MockV3Aggregator.abi.json ../../../contracts/solc/shared/MockV3Aggregator/MockV3Aggregator.sol/MockV3Aggregator.bin 76796e0faffb2981d49082d94f2f2c9ec87d8ad960b022993d0681f9c81a832d multicall3: ../../../contracts/solc/vendor/Multicall3/Multicall3.sol/Multicall3.abi.json ../../../contracts/solc/vendor/Multicall3/Multicall3.sol/Multicall3.bin 
175cd8790a4c714790c3761c50b0e93694c71bb7f8897eb92150847e6d8a94f4 +type_and_version: ../../../contracts/solc/shared/ITypeAndVersion/ITypeAndVersion.sol/ITypeAndVersion.abi.json ../../../contracts/solc/shared/ITypeAndVersion/ITypeAndVersion.sol/ITypeAndVersion.bin 21f6da4daa754971a4fdafea90ec64a77a5f03e62f9a9639802726b22eaa380a +vrf_log_emitter: ../../../contracts/solc/shared/VRFLogEmitter/VRFLogEmitter.sol/VRFLogEmitter.abi.json ../../../contracts/solc/shared/VRFLogEmitter/VRFLogEmitter.sol/VRFLogEmitter.bin 46788c9519425dd23befdea8e561ee454dcb559f6a8fe70f4a092805574218f6 werc20_mock: ../../../contracts/solc/shared/WERC20Mock/WERC20Mock.sol/WERC20Mock.abi.json ../../../contracts/solc/shared/WERC20Mock/WERC20Mock.sol/WERC20Mock.bin f5ba13fc99c248354508e3bab6cd0fb66607d3b7377f59a1e80b930e96ed4f48 diff --git a/core/gethwrappers/shared/go_generate.go b/core/gethwrappers/shared/go_generate.go index 3ac9b8ac6e9..0881e1b31e3 100644 --- a/core/gethwrappers/shared/go_generate.go +++ b/core/gethwrappers/shared/go_generate.go @@ -8,7 +8,10 @@ package gethwrappers //go:generate go run ../generation/wrap.go shared WERC20Mock werc20_mock //go:generate go run ../generation/wrap.go shared ChainReaderTester chain_reader_tester //go:generate go run ../generation/wrap.go shared AggregatorV3Interface aggregator_v3_interface +//go:generate go run ../generation/wrap.go shared MockV3Aggregator mock_v3_aggregator_contract +//go:generate go run ../generation/wrap.go shared LogEmitter log_emitter +//go:generate go run ../generation/wrap.go shared VRFLogEmitter vrf_log_emitter +//go:generate go run ../generation/wrap.go shared ITypeAndVersion type_and_version //go:generate go run ../generation/wrap.go vendor ERC20 erc20 //go:generate go run ../generation/wrap.go vendor Multicall3 multicall3 -//go:generate go run ../generation/wrap.go tests MockV3Aggregator mock_v3_aggregator_contract diff --git a/core/services/keeper/registry_interface.go b/core/services/keeper/registry_interface.go index 04bcb8e257d..b37917cef60 100644 --- a/core/services/keeper/registry_interface.go +++ b/core/services/keeper/registry_interface.go @@ -16,7 +16,7 @@ import ( registry1_1 "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/keeper_registry_wrapper1_1" registry1_2 "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/keeper_registry_wrapper1_2" registry1_3 "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/keeper_registry_wrapper1_3" - type_and_version "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/type_and_version_interface_wrapper" + "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/type_and_version" ) type RegistryVersion int32 @@ -61,14 +61,14 @@ type RegistryWrapper struct { } func NewRegistryWrapper(address evmtypes.EIP55Address, evmClient evmclient.Client) (*RegistryWrapper, error) { - interface_wrapper, err := type_and_version.NewTypeAndVersionInterface( + interfaceWrapper, err := type_and_version.NewITypeAndVersion( address.Address(), evmClient, ) if err != nil { return nil, errors.Wrap(err, "unable to create type and interface wrapper") } - version, err := getRegistryVersion(interface_wrapper) + version, err := getRegistryVersion(interfaceWrapper) if err != nil { return nil, errors.Wrap(err, "unable to determine version of keeper registry contract") } @@ -105,7 +105,7 @@ func NewRegistryWrapper(address evmtypes.EIP55Address, evmClient evmclient.Clien }, nil } -func getRegistryVersion(contract 
*type_and_version.TypeAndVersionInterface) (*RegistryVersion, error) { +func getRegistryVersion(contract *type_and_version.ITypeAndVersion) (*RegistryVersion, error) { typeAndVersion, err := contract.TypeAndVersion(nil) if err != nil { jsonErr := evmclient.ExtractRPCErrorOrNil(err) diff --git a/core/services/ocr2/plugins/ccip/config/type_and_version.go b/core/services/ocr2/plugins/ccip/config/type_and_version.go index fdfd892b087..9d5e1629c11 100644 --- a/core/services/ocr2/plugins/ccip/config/type_and_version.go +++ b/core/services/ocr2/plugins/ccip/config/type_and_version.go @@ -9,7 +9,7 @@ import ( "github.com/ethereum/go-ethereum/accounts/abi/bind" "github.com/ethereum/go-ethereum/common" - type_and_version "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/type_and_version_interface_wrapper" + "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/type_and_version" ) type ContractType string @@ -39,7 +39,7 @@ func VerifyTypeAndVersion(addr common.Address, client bind.ContractBackend, expe } func TypeAndVersion(addr common.Address, client bind.ContractBackend) (ContractType, semver.Version, error) { - tv, err := type_and_version.NewTypeAndVersionInterface(addr, client) + tv, err := type_and_version.NewITypeAndVersion(addr, client) if err != nil { return "", semver.Version{}, err } diff --git a/core/services/ocr2/plugins/ccip/internal/ccipdata/batchreader/token_pool_batch_reader.go b/core/services/ocr2/plugins/ccip/internal/ccipdata/batchreader/token_pool_batch_reader.go index 32ec1b24ac9..6d5d000e1fe 100644 --- a/core/services/ocr2/plugins/ccip/internal/ccipdata/batchreader/token_pool_batch_reader.go +++ b/core/services/ocr2/plugins/ccip/internal/ccipdata/batchreader/token_pool_batch_reader.go @@ -13,7 +13,7 @@ import ( cciptypes "github.com/smartcontractkit/chainlink-common/pkg/types/ccip" "github.com/smartcontractkit/chainlink-common/pkg/logger" - type_and_version "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/type_and_version_interface_wrapper" + "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/type_and_version" "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/plugins/ccip/abihelpers" ccipconfig "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/plugins/ccip/config" "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/plugins/ccip/internal/ccipcalc" @@ -24,7 +24,7 @@ import ( ) var ( - typeAndVersionABI = abihelpers.MustParseABI(type_and_version.TypeAndVersionInterfaceABI) + typeAndVersionABI = abihelpers.MustParseABI(type_and_version.ITypeAndVersionABI) ) type EVMTokenPoolBatchedReader struct { diff --git a/core/services/relay/evm/capabilities/testutils/chain_reader.go b/core/services/relay/evm/capabilities/testutils/chain_reader.go index 64fbf5fe720..07e0f3e05ac 100644 --- a/core/services/relay/evm/capabilities/testutils/chain_reader.go +++ b/core/services/relay/evm/capabilities/testutils/chain_reader.go @@ -14,7 +14,7 @@ import ( commontypes "github.com/smartcontractkit/chainlink-common/pkg/types" commonvalues "github.com/smartcontractkit/chainlink-common/pkg/values" "github.com/smartcontractkit/chainlink/v2/core/capabilities/triggers/logevent/logeventcap" - "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/log_emitter" + "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/log_emitter" coretestutils "github.com/smartcontractkit/chainlink/v2/core/internal/testutils" 
"github.com/smartcontractkit/chainlink/v2/core/logger" evmtypes "github.com/smartcontractkit/chainlink/v2/core/services/relay/evm/types" diff --git a/core/services/vrf/v2/listener_v2_log_listener_test.go b/core/services/vrf/v2/listener_v2_log_listener_test.go index 06af4c83f19..a29449a7ebf 100644 --- a/core/services/vrf/v2/listener_v2_log_listener_test.go +++ b/core/services/vrf/v2/listener_v2_log_listener_test.go @@ -26,9 +26,9 @@ import ( evmtypes "github.com/smartcontractkit/chainlink/v2/core/chains/evm/types" ubig "github.com/smartcontractkit/chainlink/v2/core/chains/evm/utils/big" evmmocks "github.com/smartcontractkit/chainlink/v2/core/chains/legacyevm/mocks" - "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/log_emitter" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/vrf_coordinator_v2" - "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/vrf_log_emitter" + "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/log_emitter" + "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/vrf_log_emitter" "github.com/smartcontractkit/chainlink/v2/core/internal/testutils" "github.com/smartcontractkit/chainlink/v2/core/internal/testutils/pgtest" "github.com/smartcontractkit/chainlink/v2/core/logger" diff --git a/integration-tests/ccip-tests/contracts/contract_deployer.go b/integration-tests/ccip-tests/contracts/contract_deployer.go index 940f76e93b8..0aaec8f66a0 100644 --- a/integration-tests/ccip-tests/contracts/contract_deployer.go +++ b/integration-tests/ccip-tests/contracts/contract_deployer.go @@ -50,10 +50,10 @@ import ( "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/usdc_token_pool_1_4_0" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/ccip/generated/weth9" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/link_token_interface" - type_and_version "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/type_and_version_interface_wrapper" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/burn_mint_erc677" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/erc20" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/mock_v3_aggregator_contract" + "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/type_and_version" "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/plugins/ccip/abihelpers" ccipconfig "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/plugins/ccip/config" "github.com/smartcontractkit/chainlink/v2/core/services/ocr2/plugins/ccip/testhelpers" @@ -1259,7 +1259,7 @@ func (e *CCIPContractsDeployer) NewMockAggregator(addr common.Address) (*MockAgg } func (e *CCIPContractsDeployer) TypeAndVersion(addr common.Address) (string, error) { - tv, err := type_and_version.NewTypeAndVersionInterface(addr, wrappers.MustNewWrappedContractBackend(e.evmClient, nil)) + tv, err := type_and_version.NewITypeAndVersion(addr, wrappers.MustNewWrappedContractBackend(e.evmClient, nil)) if err != nil { return "", err } diff --git a/integration-tests/contracts/test_contracts.go b/integration-tests/contracts/test_contracts.go index f8674e2136d..f6ea627ef39 100644 --- a/integration-tests/contracts/test_contracts.go +++ b/integration-tests/contracts/test_contracts.go @@ -11,7 +11,7 @@ import ( "github.com/smartcontractkit/chainlink-testing-framework/seth" 
"github.com/smartcontractkit/chainlink/integration-tests/wrappers" - le "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/log_emitter" + le "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/log_emitter" ) type LogEmitterContract struct { diff --git a/integration-tests/load/automationv2_1/automationv2_1_test.go b/integration-tests/load/automationv2_1/automationv2_1_test.go index 823c1bd8825..8d8135d214f 100644 --- a/integration-tests/load/automationv2_1/automationv2_1_test.go +++ b/integration-tests/load/automationv2_1/automationv2_1_test.go @@ -47,8 +47,8 @@ import ( aconfig "github.com/smartcontractkit/chainlink/integration-tests/testconfig/automation" "github.com/smartcontractkit/chainlink/integration-tests/testreporters" ac "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/automation_compatible_utils" - "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/log_emitter" "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/simple_log_upkeep_counter_wrapper" + "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/log_emitter" ) const ( diff --git a/integration-tests/load/automationv2_1/gun.go b/integration-tests/load/automationv2_1/gun.go index aa61562741c..7e26d906456 100644 --- a/integration-tests/load/automationv2_1/gun.go +++ b/integration-tests/load/automationv2_1/gun.go @@ -10,7 +10,7 @@ import ( "github.com/smartcontractkit/chainlink-testing-framework/seth" - "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/log_emitter" + "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/log_emitter" "github.com/smartcontractkit/chainlink/integration-tests/contracts" ) diff --git a/integration-tests/universal/log_poller/helpers.go b/integration-tests/universal/log_poller/helpers.go index 0c127d576c0..c75bff6c0c2 100644 --- a/integration-tests/universal/log_poller/helpers.go +++ b/integration-tests/universal/log_poller/helpers.go @@ -45,7 +45,7 @@ import ( "github.com/smartcontractkit/chainlink/v2/core/chains/evm/logpoller" cltypes "github.com/smartcontractkit/chainlink/v2/core/chains/evm/types" ac "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/automation_compatible_utils" - le "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/generated/log_emitter" + le "github.com/smartcontractkit/chainlink/v2/core/gethwrappers/shared/generated/log_emitter" core_logger "github.com/smartcontractkit/chainlink/v2/core/logger" ) diff --git a/tools/ci/ccip_lcov_prune b/tools/ci/ccip_lcov_prune index 9ec51e53536..fadb2cc410c 100755 --- a/tools/ci/ccip_lcov_prune +++ b/tools/ci/ccip_lcov_prune @@ -12,7 +12,6 @@ set -e # BurnWithFromMintTokenPool is excluded because Forge doesn't seem to # register coverage, even though it is 100% covered. 
- lcov --remove $1 -o $2 \ '*/ccip/test/*' \ '*/vendor/*' \ @@ -23,8 +22,6 @@ lcov --remove $1 -o $2 \ 'src/v0.8/ccip/libraries/USDPriceWith18Decimals.sol' \ 'src/v0.8/ccip/libraries/MerkleMultiProof.sol' \ 'src/v0.8/ccip/libraries/Pool.sol' \ - 'src/v0.8/ConfirmedOwnerWithProposal.sol' \ - 'src/v0.8/tests/MockV3Aggregator.sol' \ 'src/v0.8/ccip/applications/CCIPClientExample.sol' \ 'src/v0.8/ccip/pools/BurnWithFromMintTokenPool.sol' \ 'src/v0.8/ccip/rmn/RMNHome.sol' \ From ab46d04d983fe145fa490b43bb2fe685a1c08809 Mon Sep 17 00:00:00 2001 From: FelixFan1992 Date: Wed, 8 Jan 2025 11:05:31 -0500 Subject: [PATCH 6/8] DEVSVCS-1087: remove unused automation hardhat tests (#15847) * remove unused automation hardhat tests * freeze contracts * remove more tests * update --- .../action.yml | 4 + .../automation/AutomationGasAnalysis.test.ts | 258 - .../automation/AutomationRegistrar2_1.test.ts | 1022 --- .../automation/AutomationRegistry2_2.test.ts | 5962 ----------------- .../test/v0.8/automation/CronUpkeep.test.ts | 576 -- .../v0.8/automation/CronUpkeepFactory.test.ts | 107 - .../automation/ERC20BalanceMonitor.test.ts | 695 -- .../v0.8/automation/EthBalanceMonitor.test.ts | 663 -- .../IAutomationRegistryMaster2_2.test.ts | 117 - .../LinkAvailableBalanceMonitor.test.ts | 1077 --- .../automation/UpkeepBalanceMonitor.test.ts | 402 -- .../automation/UpkeepTranscoder3_0.test.ts | 576 -- .../automation/UpkeepTranscoder4_0.test.ts | 654 -- contracts/test/v0.8/automation/helpers.ts | 68 - 14 files changed, 4 insertions(+), 12177 deletions(-) delete mode 100644 contracts/test/v0.8/automation/AutomationGasAnalysis.test.ts delete mode 100644 contracts/test/v0.8/automation/AutomationRegistrar2_1.test.ts delete mode 100644 contracts/test/v0.8/automation/AutomationRegistry2_2.test.ts delete mode 100644 contracts/test/v0.8/automation/CronUpkeep.test.ts delete mode 100644 contracts/test/v0.8/automation/CronUpkeepFactory.test.ts delete mode 100644 contracts/test/v0.8/automation/ERC20BalanceMonitor.test.ts delete mode 100644 contracts/test/v0.8/automation/EthBalanceMonitor.test.ts delete mode 100644 contracts/test/v0.8/automation/IAutomationRegistryMaster2_2.test.ts delete mode 100644 contracts/test/v0.8/automation/LinkAvailableBalanceMonitor.test.ts delete mode 100644 contracts/test/v0.8/automation/UpkeepBalanceMonitor.test.ts delete mode 100644 contracts/test/v0.8/automation/UpkeepTranscoder3_0.test.ts delete mode 100644 contracts/test/v0.8/automation/UpkeepTranscoder4_0.test.ts diff --git a/.github/actions/detect-solidity-readonly-file-changes/action.yml b/.github/actions/detect-solidity-readonly-file-changes/action.yml index faca16d53f0..d0890a9f604 100644 --- a/.github/actions/detect-solidity-readonly-file-changes/action.yml +++ b/.github/actions/detect-solidity-readonly-file-changes/action.yml @@ -16,9 +16,13 @@ runs: filters: | read_only_sol: - 'contracts/src/v0.8/interfaces/**/*' + - 'contracts/src/v0.8/automation/interfaces/**/*' + - 'contracts/src/v0.8/automation/upkeeps/**/*' - 'contracts/src/v0.8/automation/v1_2/**/*' - 'contracts/src/v0.8/automation/v1_3/**/*' - 'contracts/src/v0.8/automation/v2_0/**/*' + - 'contracts/src/v0.8/automation/v2_1/**/*' + - 'contracts/src/v0.8/automation/v2_2/**/*' - name: Fail if read-only files have changed if: ${{ steps.changed_files.outputs.read_only_sol == 'true' }} diff --git a/contracts/test/v0.8/automation/AutomationGasAnalysis.test.ts b/contracts/test/v0.8/automation/AutomationGasAnalysis.test.ts deleted file mode 100644 index f393a5de1c2..00000000000 --- 
a/contracts/test/v0.8/automation/AutomationGasAnalysis.test.ts +++ /dev/null @@ -1,258 +0,0 @@ -import { ethers } from 'hardhat' -import { BigNumber } from 'ethers' -import { expect, assert } from 'chai' -import { getUsers } from '../../test-helpers/setup' -import { randomAddress, toWei } from '../../test-helpers/helpers' -import { deployRegistry21 } from './helpers' - -// don't run these tests in CI -const describeMaybe = process.env.CI ? describe.skip : describe - -// registry settings -const f = 1 -const linkEth = BigNumber.from(300000000) -const gasWei = BigNumber.from(100) -const minUpkeepSpend = BigNumber.from('1000000000000000000') -const paymentPremiumPPB = BigNumber.from(250000000) -const flatFeeMicroLink = BigNumber.from(0) -const blockCountPerTurn = 20 -const checkGasLimit = BigNumber.from(20000000) -const fallbackGasPrice = BigNumber.from(200) -const fallbackLinkPrice = BigNumber.from(200000000) -const maxCheckDataSize = BigNumber.from(10000) -const maxPerformDataSize = BigNumber.from(10000) -const maxRevertDataSize = BigNumber.from(1000) -const maxPerformGas = BigNumber.from(5000000) -const stalenessSeconds = BigNumber.from(43820) -const gasCeilingMultiplier = BigNumber.from(1) -const signers = [ - randomAddress(), - randomAddress(), - randomAddress(), - randomAddress(), -] -const transmitters = [ - randomAddress(), - randomAddress(), - randomAddress(), - randomAddress(), -] -const transcoder = ethers.constants.AddressZero - -// registrar settings -const triggerType = 0 // conditional -const autoApproveType = 2 // auto-approve enabled -const autoApproveMaxAllowed = 100 // auto-approve enabled - -// upkeep settings -const name = 'test upkeep' -const encryptedEmail = '0xabcd1234' -const gasLimit = 100_000 -const checkData = '0xdeadbeef' -const amount = toWei('5') -const source = 5 -const triggerConfig = '0x' -const offchainConfig = '0x' - -describeMaybe('Automation Gas Analysis', () => { - it('Compares gas usage amongst registries / registrars', async () => { - assert( - Boolean(process.env.REPORT_GAS), - 'this test must be run with REPORT_GAS=true', - ) - - const personas = (await getUsers()).personas - const owner = personas.Default - const ownerAddress = await owner.getAddress() - - // factories - const getFact = ethers.getContractFactory - const linkTokenFactory = await getFact('LinkToken') - const mockV3AggregatorFactory = await getFact( - 'src/v0.8/shared/mocks/MockV3Aggregator.sol:MockV3Aggregator', - ) - const upkeepMockFactory = await getFact('UpkeepMock') - const registry12Factory = await getFact('KeeperRegistry1_2') - const registrar12Factory = await getFact('KeeperRegistrar') - const registry20Factory = await getFact('KeeperRegistry2_0') - const registryLogic20Factory = await getFact('KeeperRegistryLogic2_0') - const registrar20Factory = await getFact('KeeperRegistrar2_0') - const registrar21Factory = await getFact('AutomationRegistrar2_1') - const forwarderLogicFactory = await getFact('AutomationForwarderLogic') - - // deploy dependancy contracts - const linkToken = await linkTokenFactory.connect(owner).deploy() - const gasPriceFeed = await mockV3AggregatorFactory - .connect(owner) - .deploy(0, gasWei) - const linkEthFeed = await mockV3AggregatorFactory - .connect(owner) - .deploy(9, linkEth) - const upkeep = await upkeepMockFactory.connect(owner).deploy() - - // deploy v1.2 - const registrar12 = await registrar12Factory.connect(owner).deploy( - linkToken.address, - autoApproveType, - autoApproveMaxAllowed, - ethers.constants.AddressZero, // set later - 
minUpkeepSpend, - ) - const registry12 = await registry12Factory - .connect(owner) - .deploy(linkToken.address, linkEthFeed.address, gasPriceFeed.address, { - paymentPremiumPPB, - flatFeeMicroLink, - blockCountPerTurn, - checkGasLimit, - stalenessSeconds, - gasCeilingMultiplier, - minUpkeepSpend, - maxPerformGas, - fallbackGasPrice, - fallbackLinkPrice, - transcoder, - registrar: registrar12.address, - }) - await registrar12.setRegistrationConfig( - autoApproveType, - autoApproveMaxAllowed, - registry12.address, - minUpkeepSpend, - ) - - // deploy v2.0 - const registryLogic20 = await registryLogic20Factory - .connect(owner) - .deploy(0, linkToken.address, linkEthFeed.address, gasPriceFeed.address) - const registry20 = await registry20Factory - .connect(owner) - .deploy(registryLogic20.address) - const registrar20 = await registrar20Factory - .connect(owner) - .deploy( - linkToken.address, - autoApproveType, - autoApproveMaxAllowed, - registry20.address, - minUpkeepSpend, - ) - const config20 = { - paymentPremiumPPB, - flatFeeMicroLink, - checkGasLimit, - stalenessSeconds, - gasCeilingMultiplier, - minUpkeepSpend, - maxCheckDataSize, - maxPerformDataSize, - maxPerformGas, - fallbackGasPrice, - fallbackLinkPrice, - transcoder, - registrar: registrar20.address, - } - const onchainConfig20 = ethers.utils.defaultAbiCoder.encode( - [ - 'tuple(uint32 paymentPremiumPPB,uint32 flatFeeMicroLink,uint32 checkGasLimit,uint24 stalenessSeconds\ - ,uint16 gasCeilingMultiplier,uint96 minUpkeepSpend,uint32 maxPerformGas,uint32 maxCheckDataSize,\ - uint32 maxPerformDataSize,uint256 fallbackGasPrice,uint256 fallbackLinkPrice,address transcoder,\ - address registrar)', - ], - [config20], - ) - await registry20 - .connect(owner) - .setConfig(signers, transmitters, f, onchainConfig20, 1, '0x') - - // deploy v2.1 - const forwarderLogic = await forwarderLogicFactory.connect(owner).deploy() - const registry21 = await deployRegistry21( - owner, - 0, - linkToken.address, - linkEthFeed.address, - gasPriceFeed.address, - forwarderLogic.address, - ) - const registrar21 = await registrar21Factory - .connect(owner) - .deploy(linkToken.address, registry21.address, minUpkeepSpend, [ - { - triggerType, - autoApproveType, - autoApproveMaxAllowed, - }, - ]) - const onchainConfig21 = { - paymentPremiumPPB, - flatFeeMicroLink, - checkGasLimit, - stalenessSeconds, - gasCeilingMultiplier, - minUpkeepSpend, - maxCheckDataSize, - maxPerformDataSize, - maxRevertDataSize, - maxPerformGas, - fallbackGasPrice, - fallbackLinkPrice, - transcoder, - registrars: [registrar21.address], - upkeepPrivilegeManager: randomAddress(), - } - await registry21 - .connect(owner) - .setConfigTypeSafe(signers, transmitters, f, onchainConfig21, 1, '0x') - - // approve LINK - await linkToken.connect(owner).approve(registrar20.address, amount) - await linkToken.connect(owner).approve(registrar21.address, amount) - - const abiEncodedBytes = registrar12.interface.encodeFunctionData( - 'register', - [ - name, - encryptedEmail, - upkeep.address, - gasLimit, - ownerAddress, - checkData, - amount, - source, - ownerAddress, - ], - ) - - let tx = await linkToken - .connect(owner) - .transferAndCall(registrar12.address, amount, abiEncodedBytes) - await expect(tx).to.emit(registry12, 'UpkeepRegistered') - - tx = await registrar20.connect(owner).registerUpkeep({ - name, - encryptedEmail, - upkeepContract: upkeep.address, - gasLimit, - adminAddress: ownerAddress, - checkData, - amount, - offchainConfig, - }) - await expect(tx).to.emit(registry20, 'UpkeepRegistered') - 
- tx = await registrar21.connect(owner).registerUpkeep({ - name, - encryptedEmail, - upkeepContract: upkeep.address, - gasLimit, - adminAddress: ownerAddress, - triggerType, - checkData, - amount, - triggerConfig, - offchainConfig, - }) - await expect(tx).to.emit(registry21, 'UpkeepRegistered') - }) -}) diff --git a/contracts/test/v0.8/automation/AutomationRegistrar2_1.test.ts b/contracts/test/v0.8/automation/AutomationRegistrar2_1.test.ts deleted file mode 100644 index 6d3d591acb0..00000000000 --- a/contracts/test/v0.8/automation/AutomationRegistrar2_1.test.ts +++ /dev/null @@ -1,1022 +0,0 @@ -import { ethers } from 'hardhat' -import { assert } from 'chai' -import { AutomationRegistrar2_1__factory as AutomationRegistrarFactory } from '../../../typechain/factories/AutomationRegistrar2_1__factory' - -////////////////////////////////////////////////////////////////////////////////////////////////// -////////////////////////////////////////////////////////////////////////////////////////////////// - -/*********************************** REGISTRAR v2.1 IS FROZEN ************************************/ - -// As 2.1 is still actively being deployed, we keep the tests below. - -describe('AutomationRegistrar2_1 - Frozen [ @skip-coverage ]', () => { - it('has not changed', () => { - assert.equal( - ethers.utils.id(AutomationRegistrarFactory.bytecode), - '0x9633058bd81e8479f88baaee9bda533406295c80ccbc43d4509701001bbea6e3', - 'KeeperRegistry bytecode has changed', - ) - }) -}) - -////////////////////////////////////////////////////////////////////////////////////////////////// -////////////////////////////////////////////////////////////////////////////////////////////////// -// -// // copied from KeeperRegistryBase2_1.sol -// enum Trigger { -// CONDITION, -// LOG, -// } -// -// let linkTokenFactory: LinkTokenFactory -// let mockV3AggregatorFactory: MockV3AggregatorFactory -// let upkeepMockFactory: UpkeepMockFactory -// -// let personas: Personas -// -// before(async () => { -// personas = (await getUsers()).personas -// -// linkTokenFactory = await ethers.getContractFactory( -// 'src/v0.8/shared/test/helpers/LinkTokenTestHelper.sol:LinkTokenTestHelper', -// ) -// mockV3AggregatorFactory = (await ethers.getContractFactory( -// 'src/v0.8/shared/mocks/MockV3Aggregator.sol:MockV3Aggregator', -// )) as unknown as MockV3AggregatorFactory -// upkeepMockFactory = await ethers.getContractFactory('UpkeepMock') -// }) -// -// const errorMsgs = { -// onlyOwner: 'revert Only callable by owner', -// onlyAdmin: 'OnlyAdminOrOwner()', -// hashPayload: 'HashMismatch()', -// requestNotFound: 'RequestNotFound()', -// } -// -// describe('AutomationRegistrar2_1', () => { -// const upkeepName = 'SampleUpkeep' -// -// const linkEth = BigNumber.from(300000000) -// const gasWei = BigNumber.from(100) -// const performGas = BigNumber.from(100000) -// const paymentPremiumPPB = BigNumber.from(250000000) -// const flatFeeMicroLink = BigNumber.from(0) -// const maxAllowedAutoApprove = 5 -// const trigger = '0xdeadbeef' -// const offchainConfig = '0x01234567' -// -// const emptyBytes = '0x00' -// const stalenessSeconds = BigNumber.from(43820) -// const gasCeilingMultiplier = BigNumber.from(1) -// const checkGasLimit = BigNumber.from(20000000) -// const fallbackGasPrice = BigNumber.from(200) -// const fallbackLinkPrice = BigNumber.from(200000000) -// const maxCheckDataSize = BigNumber.from(10000) -// const maxPerformDataSize = BigNumber.from(10000) -// const maxRevertDataSize = BigNumber.from(1000) -// const maxPerformGas = 
BigNumber.from(5000000) -// const minUpkeepSpend = BigNumber.from('1000000000000000000') -// const amount = BigNumber.from('5000000000000000000') -// const amount1 = BigNumber.from('6000000000000000000') -// const transcoder = ethers.constants.AddressZero -// const upkeepManager = ethers.Wallet.createRandom().address -// -// // Enum values are not auto exported in ABI so have to manually declare -// const autoApproveType_DISABLED = 0 -// const autoApproveType_ENABLED_SENDER_ALLOWLIST = 1 -// const autoApproveType_ENABLED_ALL = 2 -// -// let owner: Signer -// let admin: Signer -// let someAddress: Signer -// let registrarOwner: Signer -// let stranger: Signer -// let requestSender: Signer -// -// let linkToken: LinkToken -// let linkEthFeed: MockV3Aggregator -// let gasPriceFeed: MockV3Aggregator -// let mock: UpkeepMock -// let registry: IKeeperRegistry -// let registrar: Registrar -// -// beforeEach(async () => { -// owner = personas.Default -// admin = personas.Neil -// someAddress = personas.Ned -// registrarOwner = personas.Nelly -// stranger = personas.Nancy -// requestSender = personas.Norbert -// -// linkToken = await linkTokenFactory.connect(owner).deploy() -// gasPriceFeed = await mockV3AggregatorFactory -// .connect(owner) -// .deploy(0, gasWei) -// linkEthFeed = await mockV3AggregatorFactory -// .connect(owner) -// .deploy(9, linkEth) -// -// registry = await deployRegistry21( -// owner, -// 0, -// linkToken.address, -// linkEthFeed.address, -// gasPriceFeed.address, -// ) -// -// mock = await upkeepMockFactory.deploy() -// -// const registrarFactory = await ethers.getContractFactory( -// 'AutomationRegistrar2_1', -// ) -// registrar = await registrarFactory -// .connect(registrarOwner) -// .deploy(linkToken.address, registry.address, minUpkeepSpend, [ -// { -// triggerType: Trigger.CONDITION, -// autoApproveType: autoApproveType_DISABLED, -// autoApproveMaxAllowed: 0, -// }, -// { -// triggerType: Trigger.LOG, -// autoApproveType: autoApproveType_DISABLED, -// autoApproveMaxAllowed: 0, -// }, -// ]) -// -// await linkToken -// .connect(owner) -// .transfer(await requestSender.getAddress(), toWei('1000')) -// -// const keepers = [ -// await personas.Carol.getAddress(), -// await personas.Nancy.getAddress(), -// await personas.Ned.getAddress(), -// await personas.Neil.getAddress(), -// ] -// const onchainConfig = { -// paymentPremiumPPB, -// flatFeeMicroLink, -// checkGasLimit, -// stalenessSeconds, -// gasCeilingMultiplier, -// minUpkeepSpend, -// maxCheckDataSize, -// maxPerformDataSize, -// maxRevertDataSize, -// maxPerformGas, -// fallbackGasPrice, -// fallbackLinkPrice, -// transcoder, -// registrars: [registrar.address], -// upkeepPrivilegeManager: upkeepManager, -// } -// await registry -// .connect(owner) -// .setConfigTypeSafe(keepers, keepers, 1, onchainConfig, 1, '0x') -// }) -// -// describe('#typeAndVersion', () => { -// it('uses the correct type and version', async () => { -// const typeAndVersion = await registrar.typeAndVersion() -// assert.equal(typeAndVersion, 'AutomationRegistrar 2.1.0') -// }) -// }) -// -// describe('#register', () => { -// it('reverts if not called by the LINK token', async () => { -// await evmRevert( -// registrar -// .connect(someAddress) -// .register( -// upkeepName, -// emptyBytes, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// amount, -// await requestSender.getAddress(), -// ), -// 'OnlyLink()', -// ) -// }) -// -// it('reverts if the amount passed in data 
mismatches actual amount sent', async () => { -// await registrar -// .connect(registrarOwner) -// .setTriggerConfig( -// Trigger.CONDITION, -// autoApproveType_ENABLED_ALL, -// maxAllowedAutoApprove, -// ) -// -// const abiEncodedBytes = registrar.interface.encodeFunctionData( -// 'register', -// [ -// upkeepName, -// emptyBytes, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// amount1, -// await requestSender.getAddress(), -// ], -// ) -// -// await evmRevert( -// linkToken -// .connect(requestSender) -// .transferAndCall(registrar.address, amount, abiEncodedBytes), -// 'AmountMismatch()', -// ) -// }) -// -// it('reverts if the sender passed in data mismatches actual sender', async () => { -// const abiEncodedBytes = registrar.interface.encodeFunctionData( -// 'register', -// [ -// upkeepName, -// emptyBytes, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// amount, -// await admin.getAddress(), // Should have been requestSender.getAddress() -// ], -// ) -// await evmRevert( -// linkToken -// .connect(requestSender) -// .transferAndCall(registrar.address, amount, abiEncodedBytes), -// 'SenderMismatch()', -// ) -// }) -// -// it('reverts if the admin address is 0x0000...', async () => { -// const abiEncodedBytes = registrar.interface.encodeFunctionData( -// 'register', -// [ -// upkeepName, -// emptyBytes, -// mock.address, -// performGas, -// '0x0000000000000000000000000000000000000000', -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// amount, -// await requestSender.getAddress(), -// ], -// ) -// -// await evmRevert( -// linkToken -// .connect(requestSender) -// .transferAndCall(registrar.address, amount, abiEncodedBytes), -// 'RegistrationRequestFailed()', -// ) -// }) -// -// it('Auto Approve ON - registers an upkeep on KeeperRegistry instantly and emits both RegistrationRequested and RegistrationApproved events', async () => { -// //set auto approve ON with high threshold limits -// await registrar -// .connect(registrarOwner) -// .setTriggerConfig( -// Trigger.CONDITION, -// autoApproveType_ENABLED_ALL, -// maxAllowedAutoApprove, -// ) -// -// //register with auto approve ON -// const abiEncodedBytes = registrar.interface.encodeFunctionData( -// 'register', -// [ -// upkeepName, -// emptyBytes, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// amount, -// await requestSender.getAddress(), -// ], -// ) -// const tx = await linkToken -// .connect(requestSender) -// .transferAndCall(registrar.address, amount, abiEncodedBytes) -// -// const [id] = await registry.getActiveUpkeepIDs(0, 1) -// -// //confirm if a new upkeep has been registered and the details are the same as the one just registered -// const newupkeep = await registry.getUpkeep(id) -// assert.equal(newupkeep.target, mock.address) -// assert.equal(newupkeep.admin, await admin.getAddress()) -// assert.equal(newupkeep.checkData, emptyBytes) -// assert.equal(newupkeep.balance.toString(), amount.toString()) -// assert.equal(newupkeep.performGas, performGas.toNumber()) -// assert.equal(newupkeep.offchainConfig, offchainConfig) -// -// await expect(tx).to.emit(registrar, 'RegistrationRequested') -// await expect(tx).to.emit(registrar, 'RegistrationApproved') -// }) -// -// it('Auto Approve OFF - does not registers an upkeep on KeeperRegistry, emits only RegistrationRequested event', async () => { -// 
//get upkeep count before attempting registration -// const beforeCount = (await registry.getState()).state.numUpkeeps -// -// //set auto approve OFF, threshold limits dont matter in this case -// await registrar -// .connect(registrarOwner) -// .setTriggerConfig( -// Trigger.CONDITION, -// autoApproveType_DISABLED, -// maxAllowedAutoApprove, -// ) -// -// //register with auto approve OFF -// const abiEncodedBytes = registrar.interface.encodeFunctionData( -// 'register', -// [ -// upkeepName, -// emptyBytes, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// amount, -// await requestSender.getAddress(), -// ], -// ) -// const tx = await linkToken -// .connect(requestSender) -// .transferAndCall(registrar.address, amount, abiEncodedBytes) -// const receipt = await tx.wait() -// -// //get upkeep count after attempting registration -// const afterCount = (await registry.getState()).state.numUpkeeps -// //confirm that a new upkeep has NOT been registered and upkeep count is still the same -// assert.deepEqual(beforeCount, afterCount) -// -// //confirm that only RegistrationRequested event is emitted and RegistrationApproved event is not -// await expect(tx).to.emit(registrar, 'RegistrationRequested') -// await expect(tx).not.to.emit(registrar, 'RegistrationApproved') -// -// const hash = receipt.logs[2].topics[1] -// const pendingRequest = await registrar.getPendingRequest(hash) -// assert.equal(await admin.getAddress(), pendingRequest[0]) -// assert.ok(amount.eq(pendingRequest[1])) -// }) -// -// it('Auto Approve ON - Throttle max approvals - does not register an upkeep on KeeperRegistry beyond the max limit, emits only RegistrationRequested event after limit is hit', async () => { -// assert.equal((await registry.getState()).state.numUpkeeps.toNumber(), 0) -// -// //set auto approve on, with max 1 allowed -// await registrar -// .connect(registrarOwner) -// .setTriggerConfig(Trigger.CONDITION, autoApproveType_ENABLED_ALL, 1) -// -// //set auto approve on, with max 1 allowed -// await registrar -// .connect(registrarOwner) -// .setTriggerConfig(Trigger.LOG, autoApproveType_ENABLED_ALL, 1) -// -// // register within threshold, new upkeep should be registered -// let abiEncodedBytes = registrar.interface.encodeFunctionData('register', [ -// upkeepName, -// emptyBytes, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// amount, -// await requestSender.getAddress(), -// ]) -// await linkToken -// .connect(requestSender) -// .transferAndCall(registrar.address, amount, abiEncodedBytes) -// assert.equal((await registry.getState()).state.numUpkeeps.toNumber(), 1) // 0 -> 1 -// -// // try registering another one, new upkeep should not be registered -// abiEncodedBytes = registrar.interface.encodeFunctionData('register', [ -// upkeepName, -// emptyBytes, -// mock.address, -// performGas.toNumber() + 1, // make unique hash -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// amount, -// await requestSender.getAddress(), -// ]) -// await linkToken -// .connect(requestSender) -// .transferAndCall(registrar.address, amount, abiEncodedBytes) -// assert.equal((await registry.getState()).state.numUpkeeps.toNumber(), 1) // Still 1 -// -// // register a second type of upkeep, different limit -// abiEncodedBytes = registrar.interface.encodeFunctionData('register', [ -// upkeepName, -// emptyBytes, -// mock.address, -// performGas, 
-// await admin.getAddress(), -// Trigger.LOG, -// emptyBytes, -// trigger, -// offchainConfig, -// amount, -// await requestSender.getAddress(), -// ]) -// await linkToken -// .connect(requestSender) -// .transferAndCall(registrar.address, amount, abiEncodedBytes) -// assert.equal((await registry.getState()).state.numUpkeeps.toNumber(), 2) // 1 -> 2 -// -// // Now set new max limit to 2. One more upkeep should get auto approved -// await registrar -// .connect(registrarOwner) -// .setTriggerConfig(Trigger.CONDITION, autoApproveType_ENABLED_ALL, 2) -// -// abiEncodedBytes = registrar.interface.encodeFunctionData('register', [ -// upkeepName, -// emptyBytes, -// mock.address, -// performGas.toNumber() + 2, // make unique hash -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// amount, -// await requestSender.getAddress(), -// ]) -// await linkToken -// .connect(requestSender) -// .transferAndCall(registrar.address, amount, abiEncodedBytes) -// assert.equal((await registry.getState()).state.numUpkeeps.toNumber(), 3) // 2 -> 3 -// -// // One more upkeep should not get registered -// abiEncodedBytes = registrar.interface.encodeFunctionData('register', [ -// upkeepName, -// emptyBytes, -// mock.address, -// performGas.toNumber() + 3, // make unique hash -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// amount, -// await requestSender.getAddress(), -// ]) -// await linkToken -// .connect(requestSender) -// .transferAndCall(registrar.address, amount, abiEncodedBytes) -// assert.equal((await registry.getState()).state.numUpkeeps.toNumber(), 3) // Still 3 -// }) -// -// it('Auto Approve Sender Allowlist - sender in allowlist - registers an upkeep on KeeperRegistry instantly and emits both RegistrationRequested and RegistrationApproved events', async () => { -// const senderAddress = await requestSender.getAddress() -// -// //set auto approve to ENABLED_SENDER_ALLOWLIST type with high threshold limits -// await registrar -// .connect(registrarOwner) -// .setTriggerConfig( -// Trigger.CONDITION, -// autoApproveType_ENABLED_SENDER_ALLOWLIST, -// maxAllowedAutoApprove, -// ) -// -// // Add sender to allowlist -// await registrar -// .connect(registrarOwner) -// .setAutoApproveAllowedSender(senderAddress, true) -// -// //register with auto approve ON -// const abiEncodedBytes = registrar.interface.encodeFunctionData( -// 'register', -// [ -// upkeepName, -// emptyBytes, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// amount, -// await requestSender.getAddress(), -// ], -// ) -// const tx = await linkToken -// .connect(requestSender) -// .transferAndCall(registrar.address, amount, abiEncodedBytes) -// -// const [id] = await registry.getActiveUpkeepIDs(0, 1) -// -// //confirm if a new upkeep has been registered and the details are the same as the one just registered -// const newupkeep = await registry.getUpkeep(id) -// assert.equal(newupkeep.target, mock.address) -// assert.equal(newupkeep.admin, await admin.getAddress()) -// assert.equal(newupkeep.checkData, emptyBytes) -// assert.equal(newupkeep.balance.toString(), amount.toString()) -// assert.equal(newupkeep.performGas, performGas.toNumber()) -// -// await expect(tx).to.emit(registrar, 'RegistrationRequested') -// await expect(tx).to.emit(registrar, 'RegistrationApproved') -// }) -// -// it('Auto Approve Sender Allowlist - sender NOT in allowlist - does not registers an upkeep on KeeperRegistry, 
emits only RegistrationRequested event', async () => { -// const beforeCount = (await registry.getState()).state.numUpkeeps -// const senderAddress = await requestSender.getAddress() -// -// //set auto approve to ENABLED_SENDER_ALLOWLIST type with high threshold limits -// await registrar -// .connect(registrarOwner) -// .setTriggerConfig( -// Trigger.CONDITION, -// autoApproveType_ENABLED_SENDER_ALLOWLIST, -// maxAllowedAutoApprove, -// ) -// -// // Explicitly remove sender from allowlist -// await registrar -// .connect(registrarOwner) -// .setAutoApproveAllowedSender(senderAddress, false) -// -// //register. auto approve shouldn't happen -// const abiEncodedBytes = registrar.interface.encodeFunctionData( -// 'register', -// [ -// upkeepName, -// emptyBytes, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// amount, -// await requestSender.getAddress(), -// ], -// ) -// const tx = await linkToken -// .connect(requestSender) -// .transferAndCall(registrar.address, amount, abiEncodedBytes) -// const receipt = await tx.wait() -// -// //get upkeep count after attempting registration -// const afterCount = (await registry.getState()).state.numUpkeeps -// //confirm that a new upkeep has NOT been registered and upkeep count is still the same -// assert.deepEqual(beforeCount, afterCount) -// -// //confirm that only RegistrationRequested event is emitted and RegistrationApproved event is not -// await expect(tx).to.emit(registrar, 'RegistrationRequested') -// await expect(tx).not.to.emit(registrar, 'RegistrationApproved') -// -// const hash = receipt.logs[2].topics[1] -// const pendingRequest = await registrar.getPendingRequest(hash) -// assert.equal(await admin.getAddress(), pendingRequest[0]) -// assert.ok(amount.eq(pendingRequest[1])) -// }) -// }) -// -// describe('#registerUpkeep', () => { -// it('reverts with empty message if amount sent is not available in LINK allowance', async () => { -// await evmRevert( -// registrar.connect(someAddress).registerUpkeep({ -// name: upkeepName, -// upkeepContract: mock.address, -// gasLimit: performGas, -// adminAddress: await admin.getAddress(), -// triggerType: 0, -// checkData: emptyBytes, -// triggerConfig: trigger, -// offchainConfig: emptyBytes, -// amount, -// encryptedEmail: emptyBytes, -// }), -// '', -// ) -// }) -// -// it('reverts if the amount passed in data is less than configured minimum', async () => { -// await registrar -// .connect(registrarOwner) -// .setTriggerConfig( -// Trigger.CONDITION, -// autoApproveType_ENABLED_ALL, -// maxAllowedAutoApprove, -// ) -// -// // amt is one order of magnitude less than minUpkeepSpend -// const amt = BigNumber.from('100000000000000000') -// -// await evmRevert( -// registrar.connect(someAddress).registerUpkeep({ -// name: upkeepName, -// upkeepContract: mock.address, -// gasLimit: performGas, -// adminAddress: await admin.getAddress(), -// triggerType: 0, -// checkData: emptyBytes, -// triggerConfig: trigger, -// offchainConfig: emptyBytes, -// amount: amt, -// encryptedEmail: emptyBytes, -// }), -// 'InsufficientPayment()', -// ) -// }) -// -// it('Auto Approve ON - registers an upkeep on KeeperRegistry instantly and emits both RegistrationRequested and RegistrationApproved events', async () => { -// //set auto approve ON with high threshold limits -// await registrar -// .connect(registrarOwner) -// .setTriggerConfig( -// Trigger.CONDITION, -// autoApproveType_ENABLED_ALL, -// maxAllowedAutoApprove, -// ) -// -// await 
linkToken.connect(requestSender).approve(registrar.address, amount) -// -// const tx = await registrar.connect(requestSender).registerUpkeep({ -// name: upkeepName, -// upkeepContract: mock.address, -// gasLimit: performGas, -// adminAddress: await admin.getAddress(), -// triggerType: 0, -// checkData: emptyBytes, -// triggerConfig: trigger, -// offchainConfig, -// amount, -// encryptedEmail: emptyBytes, -// }) -// assert.equal((await registry.getState()).state.numUpkeeps.toNumber(), 1) // 0 -> 1 -// -// //confirm if a new upkeep has been registered and the details are the same as the one just registered -// const [id] = await registry.getActiveUpkeepIDs(0, 1) -// const newupkeep = await registry.getUpkeep(id) -// assert.equal(newupkeep.target, mock.address) -// assert.equal(newupkeep.admin, await admin.getAddress()) -// assert.equal(newupkeep.checkData, emptyBytes) -// assert.equal(newupkeep.balance.toString(), amount.toString()) -// assert.equal(newupkeep.performGas, performGas.toNumber()) -// assert.equal(newupkeep.offchainConfig, offchainConfig) -// -// await expect(tx).to.emit(registrar, 'RegistrationRequested') -// await expect(tx).to.emit(registrar, 'RegistrationApproved') -// }) -// }) -// -// describe('#setAutoApproveAllowedSender', () => { -// it('reverts if not called by the owner', async () => { -// const tx = registrar -// .connect(stranger) -// .setAutoApproveAllowedSender(await admin.getAddress(), false) -// await evmRevert(tx, 'Only callable by owner') -// }) -// -// it('sets the allowed status correctly and emits log', async () => { -// const senderAddress = await stranger.getAddress() -// let tx = await registrar -// .connect(registrarOwner) -// .setAutoApproveAllowedSender(senderAddress, true) -// await expect(tx) -// .to.emit(registrar, 'AutoApproveAllowedSenderSet') -// .withArgs(senderAddress, true) -// -// let senderAllowedStatus = await registrar -// .connect(owner) -// .getAutoApproveAllowedSender(senderAddress) -// assert.isTrue(senderAllowedStatus) -// -// tx = await registrar -// .connect(registrarOwner) -// .setAutoApproveAllowedSender(senderAddress, false) -// await expect(tx) -// .to.emit(registrar, 'AutoApproveAllowedSenderSet') -// .withArgs(senderAddress, false) -// -// senderAllowedStatus = await registrar -// .connect(owner) -// .getAutoApproveAllowedSender(senderAddress) -// assert.isFalse(senderAllowedStatus) -// }) -// }) -// -// describe('#setTriggerConfig', () => { -// it('reverts if not called by the owner', async () => { -// const tx = registrar -// .connect(stranger) -// .setTriggerConfig(Trigger.LOG, autoApproveType_ENABLED_ALL, 100) -// await evmRevert(tx, 'Only callable by owner') -// }) -// -// it('changes the config', async () => { -// const tx = await registrar -// .connect(registrarOwner) -// .setTriggerConfig(Trigger.LOG, autoApproveType_ENABLED_ALL, 100) -// await registrar.getTriggerRegistrationDetails(Trigger.LOG) -// await expect(tx) -// .to.emit(registrar, 'TriggerConfigSet') -// .withArgs(Trigger.LOG, autoApproveType_ENABLED_ALL, 100) -// }) -// }) -// -// describe('#approve', () => { -// let hash: string -// -// beforeEach(async () => { -// await registrar -// .connect(registrarOwner) -// .setTriggerConfig( -// Trigger.CONDITION, -// autoApproveType_DISABLED, -// maxAllowedAutoApprove, -// ) -// -// //register with auto approve OFF -// const abiEncodedBytes = registrar.interface.encodeFunctionData( -// 'register', -// [ -// upkeepName, -// emptyBytes, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// 
emptyBytes, -// trigger, -// offchainConfig, -// amount, -// await requestSender.getAddress(), -// ], -// ) -// -// const tx = await linkToken -// .connect(requestSender) -// .transferAndCall(registrar.address, amount, abiEncodedBytes) -// const receipt = await tx.wait() -// hash = receipt.logs[2].topics[1] -// }) -// -// it('reverts if not called by the owner', async () => { -// const tx = registrar -// .connect(stranger) -// .approve( -// upkeepName, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// emptyBytes, -// hash, -// ) -// await evmRevert(tx, 'Only callable by owner') -// }) -// -// it('reverts if the hash does not exist', async () => { -// const tx = registrar -// .connect(registrarOwner) -// .approve( -// upkeepName, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// emptyBytes, -// '0x000000000000000000000000322813fd9a801c5507c9de605d63cea4f2ce6c44', -// ) -// await evmRevert(tx, errorMsgs.requestNotFound) -// }) -// -// it('reverts if any member of the payload changes', async () => { -// let tx = registrar -// .connect(registrarOwner) -// .approve( -// upkeepName, -// ethers.Wallet.createRandom().address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// emptyBytes, -// hash, -// ) -// await evmRevert(tx, errorMsgs.hashPayload) -// tx = registrar -// .connect(registrarOwner) -// .approve( -// upkeepName, -// mock.address, -// 10000, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// emptyBytes, -// hash, -// ) -// await evmRevert(tx, errorMsgs.hashPayload) -// tx = registrar -// .connect(registrarOwner) -// .approve( -// upkeepName, -// mock.address, -// performGas, -// ethers.Wallet.createRandom().address, -// 0, -// emptyBytes, -// trigger, -// emptyBytes, -// hash, -// ) -// await evmRevert(tx, errorMsgs.hashPayload) -// tx = registrar -// .connect(registrarOwner) -// .approve( -// upkeepName, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// '0x1234', -// trigger, -// emptyBytes, -// hash, -// ) -// await evmRevert(tx, errorMsgs.hashPayload) -// }) -// -// it('approves an existing registration request', async () => { -// const tx = await registrar -// .connect(registrarOwner) -// .approve( -// upkeepName, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// hash, -// ) -// await expect(tx).to.emit(registrar, 'RegistrationApproved') -// }) -// -// it('deletes the request afterwards / reverts if the request DNE', async () => { -// await registrar -// .connect(registrarOwner) -// .approve( -// upkeepName, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// hash, -// ) -// const tx = registrar -// .connect(registrarOwner) -// .approve( -// upkeepName, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// hash, -// ) -// await evmRevert(tx, errorMsgs.requestNotFound) -// }) -// }) -// -// describe('#cancel', () => { -// let hash: string -// -// beforeEach(async () => { -// await registrar -// .connect(registrarOwner) -// .setTriggerConfig( -// Trigger.CONDITION, -// autoApproveType_DISABLED, -// maxAllowedAutoApprove, -// ) -// -// //register with auto approve OFF -// const abiEncodedBytes = registrar.interface.encodeFunctionData( -// 'register', -// [ -// upkeepName, -// emptyBytes, 
-// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// offchainConfig, -// amount, -// await requestSender.getAddress(), -// ], -// ) -// const tx = await linkToken -// .connect(requestSender) -// .transferAndCall(registrar.address, amount, abiEncodedBytes) -// const receipt = await tx.wait() -// hash = receipt.logs[2].topics[1] -// // submit duplicate request (increase balance) -// await linkToken -// .connect(requestSender) -// .transferAndCall(registrar.address, amount, abiEncodedBytes) -// }) -// -// it('reverts if not called by the admin / owner', async () => { -// const tx = registrar.connect(stranger).cancel(hash) -// await evmRevert(tx, errorMsgs.onlyAdmin) -// }) -// -// it('reverts if the hash does not exist', async () => { -// const tx = registrar -// .connect(registrarOwner) -// .cancel( -// '0x000000000000000000000000322813fd9a801c5507c9de605d63cea4f2ce6c44', -// ) -// await evmRevert(tx, errorMsgs.requestNotFound) -// }) -// -// it('refunds the total request balance to the admin address if owner cancels', async () => { -// const before = await linkToken.balanceOf(await admin.getAddress()) -// const tx = await registrar.connect(registrarOwner).cancel(hash) -// const after = await linkToken.balanceOf(await admin.getAddress()) -// assert.isTrue(after.sub(before).eq(amount.mul(BigNumber.from(2)))) -// await expect(tx).to.emit(registrar, 'RegistrationRejected') -// }) -// -// it('refunds the total request balance to the admin address if admin cancels', async () => { -// const before = await linkToken.balanceOf(await admin.getAddress()) -// const tx = await registrar.connect(admin).cancel(hash) -// const after = await linkToken.balanceOf(await admin.getAddress()) -// assert.isTrue(after.sub(before).eq(amount.mul(BigNumber.from(2)))) -// await expect(tx).to.emit(registrar, 'RegistrationRejected') -// }) -// -// it('deletes the request hash', async () => { -// await registrar.connect(registrarOwner).cancel(hash) -// let tx = registrar.connect(registrarOwner).cancel(hash) -// await evmRevert(tx, errorMsgs.requestNotFound) -// tx = registrar -// .connect(registrarOwner) -// .approve( -// upkeepName, -// mock.address, -// performGas, -// await admin.getAddress(), -// 0, -// emptyBytes, -// trigger, -// emptyBytes, -// hash, -// ) -// await evmRevert(tx, errorMsgs.requestNotFound) -// }) -// }) -// }) diff --git a/contracts/test/v0.8/automation/AutomationRegistry2_2.test.ts b/contracts/test/v0.8/automation/AutomationRegistry2_2.test.ts deleted file mode 100644 index 593ac08a5e7..00000000000 --- a/contracts/test/v0.8/automation/AutomationRegistry2_2.test.ts +++ /dev/null @@ -1,5962 +0,0 @@ -import { ethers } from 'hardhat' -import { loadFixture } from '@nomicfoundation/hardhat-network-helpers' -import { assert, expect } from 'chai' -import { - BigNumber, - BigNumberish, - BytesLike, - Contract, - ContractFactory, - ContractReceipt, - ContractTransaction, - Signer, - Wallet, -} from 'ethers' -import { evmRevert, evmRevertCustomError } from '../../test-helpers/matchers' -import { getUsers, Personas } from '../../test-helpers/setup' -import { randomAddress, toWei } from '../../test-helpers/helpers' -import { StreamsLookupUpkeep__factory as StreamsLookupUpkeepFactory } from '../../../typechain/factories/StreamsLookupUpkeep__factory' -import { MockV3Aggregator__factory as MockV3AggregatorFactory } from '../../../typechain/factories/MockV3Aggregator__factory' -import { UpkeepMock__factory as UpkeepMockFactory } from 
'../../../typechain/factories/UpkeepMock__factory' -import { UpkeepAutoFunder__factory as UpkeepAutoFunderFactory } from '../../../typechain/factories/UpkeepAutoFunder__factory' -import { MockArbGasInfo__factory as MockArbGasInfoFactory } from '../../../typechain/factories/MockArbGasInfo__factory' -import { MockOVMGasPriceOracle__factory as MockOVMGasPriceOracleFactory } from '../../../typechain/factories/MockOVMGasPriceOracle__factory' -import { ChainModuleBase__factory as ChainModuleBaseFactory } from '../../../typechain/factories/ChainModuleBase__factory' -import { ArbitrumModule__factory as ArbitrumModuleFactory } from '../../../typechain/factories/ArbitrumModule__factory' -import { OptimismModuleV2__factory as OptimismModuleV2Factory } from '../../../typechain/factories/OptimismModuleV2__factory' -import { ILogAutomation__factory as ILogAutomationactory } from '../../../typechain/factories/ILogAutomation__factory' -import { IAutomationForwarder__factory as IAutomationForwarderFactory } from '../../../typechain/factories/IAutomationForwarder__factory' -import { MockArbSys__factory as MockArbSysFactory } from '../../../typechain/factories/MockArbSys__factory' -import { AutomationCompatibleUtils } from '../../../typechain/AutomationCompatibleUtils' -import { MockArbGasInfo } from '../../../typechain/MockArbGasInfo' -import { MockOVMGasPriceOracle } from '../../../typechain/MockOVMGasPriceOracle' -import { StreamsLookupUpkeep } from '../../../typechain/StreamsLookupUpkeep' -import { MockV3Aggregator } from '../../../typechain/MockV3Aggregator' -import { UpkeepMock } from '../../../typechain/UpkeepMock' -import { ChainModuleBase } from '../../../typechain/ChainModuleBase' -import { ArbitrumModule } from '../../../typechain/ArbitrumModule' -import { OptimismModuleV2 } from '../../../typechain/OptimismModuleV2' -import { UpkeepTranscoder } from '../../../typechain/UpkeepTranscoder' -import { IChainModule, UpkeepAutoFunder } from '../../../typechain' -import { - CancelledUpkeepReportEvent, - IAutomationRegistryMaster as IAutomationRegistry, - ReorgedUpkeepReportEvent, - StaleUpkeepReportEvent, - UpkeepPerformedEvent, -} from '../../../typechain/IAutomationRegistryMaster' -import { - deployMockContract, - MockContract, -} from '@ethereum-waffle/mock-contract' -import { deployRegistry22 } from './helpers' - -const describeMaybe = process.env.SKIP_SLOW ? describe.skip : describe -const itMaybe = process.env.SKIP_SLOW ? 
it.skip : it - -// copied from AutomationRegistryInterface2_2.sol -enum UpkeepFailureReason { - NONE, - UPKEEP_CANCELLED, - UPKEEP_PAUSED, - TARGET_CHECK_REVERTED, - UPKEEP_NOT_NEEDED, - PERFORM_DATA_EXCEEDS_LIMIT, - INSUFFICIENT_BALANCE, - CHECK_CALLBACK_REVERTED, - REVERT_DATA_EXCEEDS_LIMIT, - REGISTRY_PAUSED, -} - -// copied from AutomationRegistryBase2_2.sol -enum Trigger { - CONDITION, - LOG, -} - -// un-exported types that must be extracted from the utils contract -type Report = Parameters<AutomationCompatibleUtils['_report']>[0] -type LogTrigger = Parameters<AutomationCompatibleUtils['_logTrigger']>[0] -type ConditionalTrigger = Parameters< - AutomationCompatibleUtils['_conditionalTrigger'] ->[0] -type Log = Parameters<AutomationCompatibleUtils['_log']>[0] - -// ----------------------------------------------------------------------------------------------- - -// These values should match the constants declared in registry -let registryConditionalOverhead: BigNumber -let registryLogOverhead: BigNumber -let registryPerSignerGasOverhead: BigNumber -let registryPerPerformByteGasOverhead: BigNumber -let registryTransmitCalldataFixedBytesOverhead: BigNumber -let registryTransmitCalldataPerSignerBytesOverhead: BigNumber -let cancellationDelay: number - -// This is the margin for gas that we test for. Gas charged should always be greater -// than total gas used in tx but should not increase beyond this margin -const gasCalculationMargin = BigNumber.from(5000) -// This is the margin for gas overhead estimation in checkUpkeep. The estimated gas -// overhead should be larger than actual gas overhead but should not increase beyond this margin -const gasEstimationMargin = BigNumber.from(5000) - -const linkEth = BigNumber.from(5000000000000000) // 1 Link = 0.005 Eth -const gasWei = BigNumber.from(1000000000) // 1 gwei -// ----------------------------------------------------------------------------------------------- -// test-wide configs for upkeeps -const linkDivisibility = BigNumber.from('1000000000000000000') -const performGas = BigNumber.from('1000000') -const paymentPremiumBase = BigNumber.from('1000000000') -const paymentPremiumPPB = BigNumber.from('250000000') -const flatFeeMicroLink = BigNumber.from(0) - -const randomBytes = '0x1234abcd' -const emptyBytes = '0x' -const emptyBytes32 = - '0x0000000000000000000000000000000000000000000000000000000000000000' - -const transmitGasOverhead = 1_000_000 -const checkGasOverhead = 500_000 - -const stalenessSeconds = BigNumber.from(43820) -const gasCeilingMultiplier = BigNumber.from(2) -const checkGasLimit = BigNumber.from(10000000) -const fallbackGasPrice = gasWei.mul(BigNumber.from('2')) -const fallbackLinkPrice = linkEth.div(BigNumber.from('2')) -const maxCheckDataSize = BigNumber.from(1000) -const maxPerformDataSize = BigNumber.from(1000) -const maxRevertDataSize = BigNumber.from(1000) -const maxPerformGas = BigNumber.from(5000000) -const minUpkeepSpend = BigNumber.from(0) -const f = 1 -const offchainVersion = 1 -const offchainBytes = '0x' -const zeroAddress = ethers.constants.AddressZero -const epochAndRound5_1 = - '0x0000000000000000000000000000000000000000000000000000000000000501' - -let logTriggerConfig: string - -// ----------------------------------------------------------------------------------------------- - -// Smart contract factories -let linkTokenFactory: ContractFactory -let mockArbGasInfoFactory: MockArbGasInfoFactory -let mockOVMGasPriceOracleFactory: MockOVMGasPriceOracleFactory -let mockV3AggregatorFactory: MockV3AggregatorFactory -let upkeepMockFactory: UpkeepMockFactory -let upkeepAutoFunderFactory: UpkeepAutoFunderFactory -let 
chainModuleBaseFactory: ChainModuleBaseFactory -let arbitrumModuleFactory: ArbitrumModuleFactory -let optimismModuleV2Factory: OptimismModuleV2Factory -let streamsLookupUpkeepFactory: StreamsLookupUpkeepFactory -let personas: Personas - -// contracts -let linkToken: Contract -let linkEthFeed: MockV3Aggregator -let gasPriceFeed: MockV3Aggregator -let registry: IAutomationRegistry // default registry, used for most tests -let arbRegistry: IAutomationRegistry // arbitrum registry -let opRegistry: IAutomationRegistry // optimism registry -let mgRegistry: IAutomationRegistry // "migrate registry" used in migration tests -let blankRegistry: IAutomationRegistry // used to test initial configurations -let mockArbGasInfo: MockArbGasInfo -let mockOVMGasPriceOracle: MockOVMGasPriceOracle -let mock: UpkeepMock -let autoFunderUpkeep: UpkeepAutoFunder -let ltUpkeep: MockContract -let transcoder: UpkeepTranscoder -let chainModuleBase: ChainModuleBase -let arbitrumModule: ArbitrumModule -let optimismModule: OptimismModuleV2 -let streamsLookupUpkeep: StreamsLookupUpkeep -let automationUtils: AutomationCompatibleUtils - -function now() { - return Math.floor(Date.now() / 1000) -} - -async function getUpkeepID(tx: ContractTransaction): Promise<BigNumber> { - const receipt = await tx.wait() - for (const event of receipt.events || []) { - if ( - event.args && - event.eventSignature == 'UpkeepRegistered(uint256,uint32,address)' - ) { - return event.args[0] - } - } - throw new Error('could not find upkeep ID in tx event logs') -} - -const getTriggerType = (upkeepId: BigNumber): Trigger => { - const hexBytes = ethers.utils.defaultAbiCoder.encode(['uint256'], [upkeepId]) - const bytes = ethers.utils.arrayify(hexBytes) - for (let idx = 4; idx < 15; idx++) { - if (bytes[idx] != 0) { - return Trigger.CONDITION - } - } - return bytes[15] as Trigger -} - -const encodeBlockTrigger = (conditionalTrigger: ConditionalTrigger) => { - return ( - '0x' + - automationUtils.interface - .encodeFunctionData('_conditionalTrigger', [conditionalTrigger]) - .slice(10) - ) -} - -const encodeLogTrigger = (logTrigger: LogTrigger) => { - return ( - '0x' + - automationUtils.interface - .encodeFunctionData('_logTrigger', [logTrigger]) - .slice(10) - ) -} - -const encodeLog = (log: Log) => { - return ( - '0x' + automationUtils.interface.encodeFunctionData('_log', [log]).slice(10) - ) -} - -const encodeReport = (report: Report) => { - return ( - '0x' + - automationUtils.interface.encodeFunctionData('_report', [report]).slice(10) - ) -} - -type UpkeepData = { - Id: BigNumberish - performGas: BigNumberish - performData: BytesLike - trigger: BytesLike -} - -const makeReport = (upkeeps: UpkeepData[]) => { - const upkeepIds = upkeeps.map((u) => u.Id) - const performGases = upkeeps.map((u) => u.performGas) - const triggers = upkeeps.map((u) => u.trigger) - const performDatas = upkeeps.map((u) => u.performData) - return encodeReport({ - fastGasWei: gasWei, - linkNative: linkEth, - upkeepIds, - gasLimits: performGases, - triggers, - performDatas, - }) -} - -const makeLatestBlockReport = async (upkeepsIDs: BigNumberish[]) => { - const latestBlock = await ethers.provider.getBlock('latest') - const upkeeps: UpkeepData[] = [] - for (let i = 0; i < upkeepsIDs.length; i++) { - upkeeps.push({ - Id: upkeepsIDs[i], - performGas, - trigger: encodeBlockTrigger({ - blockNum: latestBlock.number, - blockHash: latestBlock.hash, - }), - performData: '0x', - }) - } - return makeReport(upkeeps) -} - -const signReport = ( - reportContext: string[], - report: any, - signers: 
Wallet[], -) => { - const reportDigest = ethers.utils.keccak256(report) - const packedArgs = ethers.utils.solidityPack( - ['bytes32', 'bytes32[3]'], - [reportDigest, reportContext], - ) - const packedDigest = ethers.utils.keccak256(packedArgs) - - const signatures = [] - for (const signer of signers) { - signatures.push(signer._signingKey().signDigest(packedDigest)) - } - const vs = signatures.map((i) => '0' + (i.v - 27).toString(16)).join('') - return { - vs: '0x' + vs.padEnd(64, '0'), - rs: signatures.map((i) => i.r), - ss: signatures.map((i) => i.s), - } -} - -const parseUpkeepPerformedLogs = (receipt: ContractReceipt) => { - const parsedLogs = [] - for (const rawLog of receipt.logs) { - try { - const log = registry.interface.parseLog(rawLog) - if ( - log.name == - registry.interface.events[ - 'UpkeepPerformed(uint256,bool,uint96,uint256,uint256,bytes)' - ].name - ) { - parsedLogs.push(log as unknown as UpkeepPerformedEvent) - } - } catch { - continue - } - } - return parsedLogs -} - -const parseReorgedUpkeepReportLogs = (receipt: ContractReceipt) => { - const parsedLogs = [] - for (const rawLog of receipt.logs) { - try { - const log = registry.interface.parseLog(rawLog) - if ( - log.name == - registry.interface.events['ReorgedUpkeepReport(uint256,bytes)'].name - ) { - parsedLogs.push(log as unknown as ReorgedUpkeepReportEvent) - } - } catch { - continue - } - } - return parsedLogs -} - -const parseStaleUpkeepReportLogs = (receipt: ContractReceipt) => { - const parsedLogs = [] - for (const rawLog of receipt.logs) { - try { - const log = registry.interface.parseLog(rawLog) - if ( - log.name == - registry.interface.events['StaleUpkeepReport(uint256,bytes)'].name - ) { - parsedLogs.push(log as unknown as StaleUpkeepReportEvent) - } - } catch { - continue - } - } - return parsedLogs -} - -const parseCancelledUpkeepReportLogs = (receipt: ContractReceipt) => { - const parsedLogs = [] - for (const rawLog of receipt.logs) { - try { - const log = registry.interface.parseLog(rawLog) - if ( - log.name == - registry.interface.events['CancelledUpkeepReport(uint256,bytes)'].name - ) { - parsedLogs.push(log as unknown as CancelledUpkeepReportEvent) - } - } catch { - continue - } - } - return parsedLogs -} - -describe('AutomationRegistry2_2', () => { - let owner: Signer - let keeper1: Signer - let keeper2: Signer - let keeper3: Signer - let keeper4: Signer - let keeper5: Signer - let nonkeeper: Signer - let signer1: Wallet - let signer2: Wallet - let signer3: Wallet - let signer4: Wallet - let signer5: Wallet - let admin: Signer - let payee1: Signer - let payee2: Signer - let payee3: Signer - let payee4: Signer - let payee5: Signer - - let upkeepId: BigNumber // conditional upkeep - let afUpkeepId: BigNumber // auto funding upkeep - let logUpkeepId: BigNumber // log trigger upkeepID - let streamsLookupUpkeepId: BigNumber // streams lookup upkeep - const numUpkeeps = 4 // see above - let keeperAddresses: string[] - let payees: string[] - let signers: Wallet[] - let signerAddresses: string[] - let config: any - let arbConfig: any - let opConfig: any - let baseConfig: Parameters<IAutomationRegistry['setConfigTypeSafe']> - let arbConfigParams: Parameters<IAutomationRegistry['setConfigTypeSafe']> - let opConfigParams: Parameters<IAutomationRegistry['setConfigTypeSafe']> - let upkeepManager: string - - before(async () => { - personas = (await getUsers()).personas - - const convFactory = await ethers.getContractFactory( - 'AutomationCompatibleUtils', - ) - automationUtils = await convFactory.deploy() - - linkTokenFactory = await ethers.getContractFactory( - 'src/v0.8/shared/test/helpers/LinkTokenTestHelper.sol:LinkTokenTestHelper', 
- ) - // need full path because there are two contracts with name MockV3Aggregator - mockV3AggregatorFactory = (await ethers.getContractFactory( - 'src/v0.8/shared/mocks/MockV3Aggregator.sol:MockV3Aggregator', - )) as unknown as MockV3AggregatorFactory - mockArbGasInfoFactory = await ethers.getContractFactory('MockArbGasInfo') - mockOVMGasPriceOracleFactory = await ethers.getContractFactory( - 'MockOVMGasPriceOracle', - ) - upkeepMockFactory = await ethers.getContractFactory('UpkeepMock') - upkeepAutoFunderFactory = - await ethers.getContractFactory('UpkeepAutoFunder') - chainModuleBaseFactory = await ethers.getContractFactory('ChainModuleBase') - arbitrumModuleFactory = await ethers.getContractFactory('ArbitrumModule') - optimismModuleV2Factory = - await ethers.getContractFactory('OptimismModuleV2') - streamsLookupUpkeepFactory = await ethers.getContractFactory( - 'StreamsLookupUpkeep', - ) - - owner = personas.Default - keeper1 = personas.Carol - keeper2 = personas.Eddy - keeper3 = personas.Nancy - keeper4 = personas.Norbert - keeper5 = personas.Nick - nonkeeper = personas.Ned - admin = personas.Neil - payee1 = personas.Nelly - payee2 = personas.Norbert - payee3 = personas.Nick - payee4 = personas.Eddy - payee5 = personas.Carol - upkeepManager = await personas.Norbert.getAddress() - // signers - signer1 = new ethers.Wallet( - '0x7777777000000000000000000000000000000000000000000000000000000001', - ) - signer2 = new ethers.Wallet( - '0x7777777000000000000000000000000000000000000000000000000000000002', - ) - signer3 = new ethers.Wallet( - '0x7777777000000000000000000000000000000000000000000000000000000003', - ) - signer4 = new ethers.Wallet( - '0x7777777000000000000000000000000000000000000000000000000000000004', - ) - signer5 = new ethers.Wallet( - '0x7777777000000000000000000000000000000000000000000000000000000005', - ) - - keeperAddresses = [ - await keeper1.getAddress(), - await keeper2.getAddress(), - await keeper3.getAddress(), - await keeper4.getAddress(), - await keeper5.getAddress(), - ] - payees = [ - await payee1.getAddress(), - await payee2.getAddress(), - await payee3.getAddress(), - await payee4.getAddress(), - await payee5.getAddress(), - ] - signers = [signer1, signer2, signer3, signer4, signer5] - - // We append 26 random addresses to keepers, payees and signers to get a system of 31 oracles - // This allows f value of 1 - 10 - for (let i = 0; i < 26; i++) { - keeperAddresses.push(randomAddress()) - payees.push(randomAddress()) - signers.push(ethers.Wallet.createRandom()) - } - signerAddresses = [] - for (const signer of signers) { - signerAddresses.push(await signer.getAddress()) - } - - logTriggerConfig = - '0x' + - automationUtils.interface - .encodeFunctionData('_logTriggerConfig', [ - { - contractAddress: randomAddress(), - filterSelector: 0, - topic0: ethers.utils.randomBytes(32), - topic1: ethers.utils.randomBytes(32), - topic2: ethers.utils.randomBytes(32), - topic3: ethers.utils.randomBytes(32), - }, - ]) - .slice(10) - }) - - // This function is similar to registry's _calculatePaymentAmount - // It uses global fastGasWei, linkEth, and assumes isExecution = false (gasFee = fastGasWei*multiplier) - // rest of the parameters are the same - const linkForGas = ( - upkeepGasSpent: BigNumber, - gasOverhead: BigNumber, - gasMultiplier: BigNumber, - premiumPPB: BigNumber, - flatFee: BigNumber, - l1CostWei?: BigNumber, - ) => { - l1CostWei = l1CostWei === undefined ? 
BigNumber.from(0) : l1CostWei - - const gasSpent = gasOverhead.add(BigNumber.from(upkeepGasSpent)) - const base = gasWei - .mul(gasMultiplier) - .mul(gasSpent) - .mul(linkDivisibility) - .div(linkEth) - const l1Fee = l1CostWei.mul(linkDivisibility).div(linkEth) - const gasPayment = base.add(l1Fee) - - const premium = gasWei - .mul(gasMultiplier) - .mul(upkeepGasSpent) - .add(l1CostWei) - .mul(linkDivisibility) - .div(linkEth) - .mul(premiumPPB) - .div(paymentPremiumBase) - .add(BigNumber.from(flatFee).mul('1000000000000')) - - return { - total: gasPayment.add(premium), - gasPayment, - premium, - } - } - - const verifyMaxPayment = async ( - registry: IAutomationRegistry, - chainModule: IChainModule, - maxl1CostWeWithoutMultiplier?: BigNumber, - ) => { - type TestCase = { - name: string - multiplier: number - gas: number - premium: number - flatFee: number - } - - const tests: TestCase[] = [ - { - name: 'no fees', - multiplier: 1, - gas: 100000, - premium: 0, - flatFee: 0, - }, - { - name: 'basic fees', - multiplier: 1, - gas: 100000, - premium: 250000000, - flatFee: 1000000, - }, - { - name: 'max fees', - multiplier: 3, - gas: 10000000, - premium: 250000000, - flatFee: 1000000, - }, - ] - - const fPlusOne = BigNumber.from(f + 1) - const chainModuleOverheads = await chainModule.getGasOverhead() - const totalConditionalOverhead = registryConditionalOverhead - .add(registryPerSignerGasOverhead.mul(fPlusOne)) - .add( - registryPerPerformByteGasOverhead - .add(chainModuleOverheads.chainModulePerByteOverhead) - .mul( - maxPerformDataSize - .add(registryTransmitCalldataFixedBytesOverhead) - .add( - registryTransmitCalldataPerSignerBytesOverhead.mul(fPlusOne), - ), - ), - ) - .add(chainModuleOverheads.chainModuleFixedOverhead) - - const totalLogOverhead = registryLogOverhead - .add(registryPerSignerGasOverhead.mul(fPlusOne)) - .add( - registryPerPerformByteGasOverhead - .add(chainModuleOverheads.chainModulePerByteOverhead) - .mul( - maxPerformDataSize - .add(registryTransmitCalldataFixedBytesOverhead) - .add( - registryTransmitCalldataPerSignerBytesOverhead.mul(fPlusOne), - ), - ), - ) - .add(chainModuleOverheads.chainModuleFixedOverhead) - - for (const test of tests) { - await registry.connect(owner).setConfigTypeSafe( - signerAddresses, - keeperAddresses, - f, - { - paymentPremiumPPB: test.premium, - flatFeeMicroLink: test.flatFee, - checkGasLimit, - stalenessSeconds, - gasCeilingMultiplier: test.multiplier, - minUpkeepSpend, - maxCheckDataSize, - maxPerformDataSize, - maxRevertDataSize, - maxPerformGas, - fallbackGasPrice, - fallbackLinkPrice, - transcoder: transcoder.address, - registrars: [], - upkeepPrivilegeManager: upkeepManager, - chainModule: chainModule.address, - reorgProtectionEnabled: true, - }, - offchainVersion, - offchainBytes, - ) - - const conditionalPrice = await registry.getMaxPaymentForGas( - Trigger.CONDITION, - test.gas, - ) - expect(conditionalPrice).to.equal( - linkForGas( - BigNumber.from(test.gas), - totalConditionalOverhead, - BigNumber.from(test.multiplier), - BigNumber.from(test.premium), - BigNumber.from(test.flatFee), - maxl1CostWeWithoutMultiplier?.mul(BigNumber.from(test.multiplier)), - ).total, - ) - - const logPrice = await registry.getMaxPaymentForGas(Trigger.LOG, test.gas) - expect(logPrice).to.equal( - linkForGas( - BigNumber.from(test.gas), - totalLogOverhead, - BigNumber.from(test.multiplier), - BigNumber.from(test.premium), - BigNumber.from(test.flatFee), - maxl1CostWeWithoutMultiplier?.mul(BigNumber.from(test.multiplier)), - ).total, - ) - } - } - - const 
verifyConsistentAccounting = async ( - maxAllowedSpareChange: BigNumber, - ) => { - const expectedLinkBalance = (await registry.getState()).state - .expectedLinkBalance - const linkTokenBalance = await linkToken.balanceOf(registry.address) - const upkeepIdBalance = (await registry.getUpkeep(upkeepId)).balance - let totalKeeperBalance = BigNumber.from(0) - for (let i = 0; i < keeperAddresses.length; i++) { - totalKeeperBalance = totalKeeperBalance.add( - (await registry.getTransmitterInfo(keeperAddresses[i])).balance, - ) - } - const ownerBalance = (await registry.getState()).state.ownerLinkBalance - assert.isTrue(expectedLinkBalance.eq(linkTokenBalance)) - assert.isTrue( - upkeepIdBalance - .add(totalKeeperBalance) - .add(ownerBalance) - .lte(expectedLinkBalance), - ) - assert.isTrue( - expectedLinkBalance - .sub(upkeepIdBalance) - .sub(totalKeeperBalance) - .sub(ownerBalance) - .lte(maxAllowedSpareChange), - ) - } - - interface GetTransmitTXOptions { - numSigners?: number - startingSignerIndex?: number - gasLimit?: BigNumberish - gasPrice?: BigNumberish - performGas?: BigNumberish - performDatas?: string[] - checkBlockNum?: number - checkBlockHash?: string - logBlockHash?: BytesLike - txHash?: BytesLike - logIndex?: number - timestamp?: number - } - - const getTransmitTx = async ( - registry: IAutomationRegistry, - transmitter: Signer, - upkeepIds: BigNumber[], - overrides: GetTransmitTXOptions = {}, - ) => { - const latestBlock = await ethers.provider.getBlock('latest') - const configDigest = (await registry.getState()).state.latestConfigDigest - const config = { - numSigners: f + 1, - startingSignerIndex: 0, - performDatas: undefined, - performGas, - checkBlockNum: latestBlock.number, - checkBlockHash: latestBlock.hash, - logIndex: 0, - txHash: undefined, // assigned uniquely below - logBlockHash: undefined, // assigned uniquely below - timestamp: now(), - gasLimit: undefined, - gasPrice: undefined, - } - Object.assign(config, overrides) - const upkeeps: UpkeepData[] = [] - for (let i = 0; i < upkeepIds.length; i++) { - let trigger: string - switch (getTriggerType(upkeepIds[i])) { - case Trigger.CONDITION: - trigger = encodeBlockTrigger({ - blockNum: config.checkBlockNum, - blockHash: config.checkBlockHash, - }) - break - case Trigger.LOG: - trigger = encodeLogTrigger({ - logBlockHash: config.logBlockHash || ethers.utils.randomBytes(32), - txHash: config.txHash || ethers.utils.randomBytes(32), - logIndex: config.logIndex, - blockNum: config.checkBlockNum, - blockHash: config.checkBlockHash, - }) - break - } - upkeeps.push({ - Id: upkeepIds[i], - performGas: config.performGas, - trigger, - performData: config.performDatas ? 
config.performDatas[i] : '0x', - }) - } - - const report = makeReport(upkeeps) - const reportContext = [configDigest, epochAndRound5_1, emptyBytes32] - const sigs = signReport( - reportContext, - report, - signers.slice( - config.startingSignerIndex, - config.startingSignerIndex + config.numSigners, - ), - ) - - type txOverride = { - gasLimit?: BigNumberish | Promise<BigNumberish> - gasPrice?: BigNumberish | Promise<BigNumberish> - } - const txOverrides: txOverride = {} - if (config.gasLimit) { - txOverrides.gasLimit = config.gasLimit - } - if (config.gasPrice) { - txOverrides.gasPrice = config.gasPrice - } - - return registry - .connect(transmitter) - .transmit( - [configDigest, epochAndRound5_1, emptyBytes32], - report, - sigs.rs, - sigs.ss, - sigs.vs, - txOverrides, - ) - } - - const getTransmitTxWithReport = async ( - registry: IAutomationRegistry, - transmitter: Signer, - report: BytesLike, - ) => { - const configDigest = (await registry.getState()).state.latestConfigDigest - const reportContext = [configDigest, epochAndRound5_1, emptyBytes32] - const sigs = signReport(reportContext, report, signers.slice(0, f + 1)) - - return registry - .connect(transmitter) - .transmit( - [configDigest, epochAndRound5_1, emptyBytes32], - report, - sigs.rs, - sigs.ss, - sigs.vs, - ) - } - - const setup = async () => { - linkToken = await linkTokenFactory.connect(owner).deploy() - gasPriceFeed = await mockV3AggregatorFactory - .connect(owner) - .deploy(0, gasWei) - linkEthFeed = await mockV3AggregatorFactory - .connect(owner) - .deploy(9, linkEth) - const upkeepTranscoderFactory = await ethers.getContractFactory( - 'UpkeepTranscoder4_0', - ) - transcoder = await upkeepTranscoderFactory.connect(owner).deploy() - mockArbGasInfo = await mockArbGasInfoFactory.connect(owner).deploy() - mockOVMGasPriceOracle = await mockOVMGasPriceOracleFactory - .connect(owner) - .deploy() - chainModuleBase = await chainModuleBaseFactory.connect(owner).deploy() - arbitrumModule = await arbitrumModuleFactory.connect(owner).deploy() - optimismModule = await optimismModuleV2Factory.connect(owner).deploy() - streamsLookupUpkeep = await streamsLookupUpkeepFactory - .connect(owner) - .deploy( - BigNumber.from('10000'), - BigNumber.from('100'), - false /* useArbBlock */, - true /* staging */, - false /* verify mercury response */, - ) - - const arbOracleCode = await ethers.provider.send('eth_getCode', [ - mockArbGasInfo.address, - ]) - await ethers.provider.send('hardhat_setCode', [ - '0x000000000000000000000000000000000000006C', - arbOracleCode, - ]) - - const optOracleCode = await ethers.provider.send('eth_getCode', [ - mockOVMGasPriceOracle.address, - ]) - await ethers.provider.send('hardhat_setCode', [ - '0x420000000000000000000000000000000000000F', - optOracleCode, - ]) - - const mockArbSys = await new MockArbSysFactory(owner).deploy() - const arbSysCode = await ethers.provider.send('eth_getCode', [ - mockArbSys.address, - ]) - await ethers.provider.send('hardhat_setCode', [ - '0x0000000000000000000000000000000000000064', - arbSysCode, - ]) - - config = { - paymentPremiumPPB, - flatFeeMicroLink, - checkGasLimit, - stalenessSeconds, - gasCeilingMultiplier, - minUpkeepSpend, - maxCheckDataSize, - maxPerformDataSize, - maxRevertDataSize, - maxPerformGas, - fallbackGasPrice, - fallbackLinkPrice, - transcoder: transcoder.address, - registrars: [], - upkeepPrivilegeManager: upkeepManager, - chainModule: chainModuleBase.address, - reorgProtectionEnabled: true, - } - - arbConfig = { ...config } - arbConfig.chainModule = arbitrumModule.address - opConfig = { 
...config } - opConfig.chainModule = optimismModule.address - - baseConfig = [ - signerAddresses, - keeperAddresses, - f, - config, - offchainVersion, - offchainBytes, - ] - arbConfigParams = [ - signerAddresses, - keeperAddresses, - f, - arbConfig, - offchainVersion, - offchainBytes, - ] - opConfigParams = [ - signerAddresses, - keeperAddresses, - f, - opConfig, - offchainVersion, - offchainBytes, - ] - - registry = await deployRegistry22( - owner, - linkToken.address, - linkEthFeed.address, - gasPriceFeed.address, - zeroAddress, - ) - - arbRegistry = await deployRegistry22( - owner, - linkToken.address, - linkEthFeed.address, - gasPriceFeed.address, - zeroAddress, - ) - - opRegistry = await deployRegistry22( - owner, - linkToken.address, - linkEthFeed.address, - gasPriceFeed.address, - zeroAddress, - ) - - mgRegistry = await deployRegistry22( - owner, - linkToken.address, - linkEthFeed.address, - gasPriceFeed.address, - zeroAddress, - ) - - blankRegistry = await deployRegistry22( - owner, - linkToken.address, - linkEthFeed.address, - gasPriceFeed.address, - zeroAddress, - ) - - registryConditionalOverhead = await registry.getConditionalGasOverhead() - registryLogOverhead = await registry.getLogGasOverhead() - registryPerSignerGasOverhead = await registry.getPerSignerGasOverhead() - registryPerPerformByteGasOverhead = - await registry.getPerPerformByteGasOverhead() - registryTransmitCalldataFixedBytesOverhead = - await registry.getTransmitCalldataFixedBytesOverhead() - registryTransmitCalldataPerSignerBytesOverhead = - await registry.getTransmitCalldataPerSignerBytesOverhead() - cancellationDelay = (await registry.getCancellationDelay()).toNumber() - - await registry.connect(owner).setConfigTypeSafe(...baseConfig) - await mgRegistry.connect(owner).setConfigTypeSafe(...baseConfig) - await arbRegistry.connect(owner).setConfigTypeSafe(...arbConfigParams) - await opRegistry.connect(owner).setConfigTypeSafe(...opConfigParams) - for (const reg of [registry, arbRegistry, opRegistry, mgRegistry]) { - await reg.connect(owner).setPayees(payees) - await linkToken.connect(admin).approve(reg.address, toWei('1000')) - await linkToken.connect(owner).approve(reg.address, toWei('1000')) - } - - mock = await upkeepMockFactory.deploy() - await linkToken - .connect(owner) - .transfer(await admin.getAddress(), toWei('1000')) - let tx = await registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), randomBytes, '0x') - upkeepId = await getUpkeepID(tx) - - autoFunderUpkeep = await upkeepAutoFunderFactory - .connect(owner) - .deploy(linkToken.address, registry.address) - tx = await registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](autoFunderUpkeep.address, performGas, autoFunderUpkeep.address, randomBytes, '0x') - afUpkeepId = await getUpkeepID(tx) - - ltUpkeep = await deployMockContract(owner, ILogAutomationactory.abi) - tx = await registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,uint8,bytes,bytes,bytes)' - ](ltUpkeep.address, performGas, await admin.getAddress(), Trigger.LOG, '0x', logTriggerConfig, emptyBytes) - logUpkeepId = await getUpkeepID(tx) - - await autoFunderUpkeep.setUpkeepId(afUpkeepId) - // Give enough funds for upkeep as well as to the upkeep contract - await linkToken - .connect(owner) - .transfer(autoFunderUpkeep.address, toWei('1000')) - - tx = await registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - 
](streamsLookupUpkeep.address, performGas, await admin.getAddress(), randomBytes, '0x') - streamsLookupUpkeepId = await getUpkeepID(tx) - } - - const getMultipleUpkeepsDeployedAndFunded = async ( - numPassingConditionalUpkeeps: number, - numPassingLogUpkeeps: number, - numFailingUpkeeps: number, - ) => { - const passingConditionalUpkeepIds = [] - const passingLogUpkeepIds = [] - const failingUpkeepIds = [] - for (let i = 0; i < numPassingConditionalUpkeeps; i++) { - const mock = await upkeepMockFactory.deploy() - await mock.setCanPerform(true) - await mock.setPerformGasToBurn(BigNumber.from('0')) - const tx = await registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), randomBytes, '0x') - const condUpkeepId = await getUpkeepID(tx) - passingConditionalUpkeepIds.push(condUpkeepId) - - // Add funds to passing upkeeps - await registry.connect(admin).addFunds(condUpkeepId, toWei('100')) - } - for (let i = 0; i < numPassingLogUpkeeps; i++) { - const mock = await upkeepMockFactory.deploy() - await mock.setCanPerform(true) - await mock.setPerformGasToBurn(BigNumber.from('0')) - const tx = await registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,uint8,bytes,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), Trigger.LOG, '0x', logTriggerConfig, emptyBytes) - const logUpkeepId = await getUpkeepID(tx) - passingLogUpkeepIds.push(logUpkeepId) - - // Add funds to passing upkeeps - await registry.connect(admin).addFunds(logUpkeepId, toWei('100')) - } - for (let i = 0; i < numFailingUpkeeps; i++) { - const mock = await upkeepMockFactory.deploy() - await mock.setCanPerform(true) - await mock.setPerformGasToBurn(BigNumber.from('0')) - const tx = await registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), randomBytes, '0x') - const failingUpkeepId = await getUpkeepID(tx) - failingUpkeepIds.push(failingUpkeepId) - } - return { - passingConditionalUpkeepIds, - passingLogUpkeepIds, - failingUpkeepIds, - } - } - - beforeEach(async () => { - await loadFixture(setup) - }) - - describe('#transmit', () => { - const fArray = [1, 5, 10] - - it('reverts when registry is paused', async () => { - await registry.connect(owner).pause() - await evmRevertCustomError( - getTransmitTx(registry, keeper1, [upkeepId]), - registry, - 'RegistryPaused', - ) - }) - - it('reverts when called by non active transmitter', async () => { - await evmRevertCustomError( - getTransmitTx(registry, payee1, [upkeepId]), - registry, - 'OnlyActiveTransmitters', - ) - }) - - it('reverts when report data lengths mismatches', async () => { - const upkeepIds = [] - const gasLimits: BigNumber[] = [] - const triggers: string[] = [] - const performDatas = [] - - upkeepIds.push(upkeepId) - gasLimits.push(performGas) - triggers.push('0x') - performDatas.push('0x') - // Push an extra perform data - performDatas.push('0x') - - const report = encodeReport({ - fastGasWei: 0, - linkNative: 0, - upkeepIds, - gasLimits, - triggers, - performDatas, - }) - - await evmRevertCustomError( - getTransmitTxWithReport(registry, keeper1, report), - registry, - 'InvalidReport', - ) - }) - - it('returns early when invalid upkeepIds are included in report', async () => { - const tx = await getTransmitTx(registry, keeper1, [ - upkeepId.add(BigNumber.from('1')), - ]) - - const receipt = await tx.wait() - const cancelledUpkeepReportLogs = 
parseCancelledUpkeepReportLogs(receipt) - // exactly 1 CancelledUpkeepReport log should be emitted - assert.equal(cancelledUpkeepReportLogs.length, 1) - }) - - it('performs even when the upkeep has insufficient funds and the upkeep pays out all the remaining balance', async () => { - // add very little fund to this upkeep - await registry.connect(admin).addFunds(upkeepId, BigNumber.from(10)) - const tx = await getTransmitTx(registry, keeper1, [upkeepId]) - const receipt = await tx.wait() - // the upkeep is underfunded in transmit but still performed - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - assert.equal(upkeepPerformedLogs.length, 1) - const balance = (await registry.getUpkeep(upkeepId)).balance - assert.equal(balance.toNumber(), 0) - }) - - context('When the upkeep is funded', async () => { - beforeEach(async () => { - // Fund the upkeep - await Promise.all([ - registry.connect(admin).addFunds(upkeepId, toWei('100')), - registry.connect(admin).addFunds(logUpkeepId, toWei('100')), - ]) - }) - - it('handles duplicate upkeepIDs', async () => { - const tests: [string, BigNumber, number, number][] = [ - // [name, upkeep, num stale, num performed] - ['conditional', upkeepId, 1, 1], // checkBlocks must be sequential - ['log-trigger', logUpkeepId, 0, 2], // logs are deduped based on the "trigger ID" - ] - for (const [type, id, nStale, nPerformed] of tests) { - const tx = await getTransmitTx(registry, keeper1, [id, id]) - const receipt = await tx.wait() - const staleUpkeepReport = parseStaleUpkeepReportLogs(receipt) - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - assert.equal( - staleUpkeepReport.length, - nStale, - `wrong log count for ${type} upkeep`, - ) - assert.equal( - upkeepPerformedLogs.length, - nPerformed, - `wrong log count for ${type} upkeep`, - ) - } - }) - - it('handles duplicate log triggers', async () => { - const logBlockHash = ethers.utils.randomBytes(32) - const txHash = ethers.utils.randomBytes(32) - const logIndex = 0 - const expectedDedupKey = ethers.utils.solidityKeccak256( - ['uint256', 'bytes32', 'bytes32', 'uint32'], - [logUpkeepId, logBlockHash, txHash, logIndex], - ) - assert.isFalse(await registry.hasDedupKey(expectedDedupKey)) - const tx = await getTransmitTx( - registry, - keeper1, - [logUpkeepId, logUpkeepId], - { logBlockHash, txHash, logIndex }, // will result in the same dedup key - ) - const receipt = await tx.wait() - const staleUpkeepReport = parseStaleUpkeepReportLogs(receipt) - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - assert.equal(staleUpkeepReport.length, 1) - assert.equal(upkeepPerformedLogs.length, 1) - assert.isTrue(await registry.hasDedupKey(expectedDedupKey)) - await expect(tx) - .to.emit(registry, 'DedupKeyAdded') - .withArgs(expectedDedupKey) - }) - - it('returns early when check block number is less than last perform (block)', async () => { - // First perform an upkeep to put last perform block number on upkeep state - const tx = await getTransmitTx(registry, keeper1, [upkeepId]) - await tx.wait() - const lastPerformed = (await registry.getUpkeep(upkeepId)) - .lastPerformedBlockNumber - const lastPerformBlock = await ethers.provider.getBlock(lastPerformed) - assert.equal(lastPerformed.toString(), tx.blockNumber?.toString()) - // Try to transmit a report which has checkBlockNumber = lastPerformed-1, should result in stale report - const transmitTx = await getTransmitTx(registry, keeper1, [upkeepId], { - checkBlockNum: lastPerformBlock.number - 1, - checkBlockHash: 
lastPerformBlock.parentHash, - }) - const receipt = await transmitTx.wait() - const staleUpkeepReportLogs = parseStaleUpkeepReportLogs(receipt) - // exactly 1 StaleUpkeepReportLogs log should be emitted - assert.equal(staleUpkeepReportLogs.length, 1) - }) - - it('handles case when check block hash does not match', async () => { - const tests: [string, BigNumber][] = [ - ['conditional', upkeepId], - ['log-trigger', logUpkeepId], - ] - for (const [type, id] of tests) { - const latestBlock = await ethers.provider.getBlock('latest') - // Try to transmit a report which has incorrect checkBlockHash - const tx = await getTransmitTx(registry, keeper1, [id], { - checkBlockNum: latestBlock.number - 1, - checkBlockHash: latestBlock.hash, // should be latestBlock.parentHash - }) - - const receipt = await tx.wait() - const reorgedUpkeepReportLogs = parseReorgedUpkeepReportLogs(receipt) - // exactly 1 ReorgedUpkeepReportLogs log should be emitted - assert.equal( - reorgedUpkeepReportLogs.length, - 1, - `wrong log count for ${type} upkeep`, - ) - } - }) - - it('handles case when check block number is older than 256 blocks', async () => { - for (let i = 0; i < 256; i++) { - await ethers.provider.send('evm_mine', []) - } - const tests: [string, BigNumber][] = [ - ['conditional', upkeepId], - ['log-trigger', logUpkeepId], - ] - for (const [type, id] of tests) { - const latestBlock = await ethers.provider.getBlock('latest') - const old = await ethers.provider.getBlock(latestBlock.number - 256) - // Try to transmit a report which has incorrect checkBlockHash - const tx = await getTransmitTx(registry, keeper1, [id], { - checkBlockNum: old.number, - checkBlockHash: old.hash, - }) - - const receipt = await tx.wait() - const reorgedUpkeepReportLogs = parseReorgedUpkeepReportLogs(receipt) - // exactly 1 ReorgedUpkeepReportLogs log should be emitted - assert.equal( - reorgedUpkeepReportLogs.length, - 1, - `wrong log count for ${type} upkeep`, - ) - } - }) - - it('allows bypassing reorg protection with empty blockhash', async () => { - const tests: [string, BigNumber][] = [ - ['conditional', upkeepId], - ['log-trigger', logUpkeepId], - ] - for (const [type, id] of tests) { - const latestBlock = await ethers.provider.getBlock('latest') - const tx = await getTransmitTx(registry, keeper1, [id], { - checkBlockNum: latestBlock.number, - checkBlockHash: emptyBytes32, - }) - const receipt = await tx.wait() - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - assert.equal( - upkeepPerformedLogs.length, - 1, - `wrong log count for ${type} upkeep`, - ) - } - }) - - it('allows bypassing reorg protection with reorgProtectionEnabled false config', async () => { - const tests: [string, BigNumber][] = [ - ['conditional', upkeepId], - ['log-trigger', logUpkeepId], - ] - const newConfig = config - newConfig.reorgProtectionEnabled = false - await registry // used to test initial configurations - .connect(owner) - .setConfigTypeSafe( - signerAddresses, - keeperAddresses, - f, - newConfig, - offchainVersion, - offchainBytes, - ) - - for (const [type, id] of tests) { - const latestBlock = await ethers.provider.getBlock('latest') - // Try to transmit a report which has incorrect checkBlockHash - const tx = await getTransmitTx(registry, keeper1, [id], { - checkBlockNum: latestBlock.number - 1, - checkBlockHash: latestBlock.hash, // should be latestBlock.parentHash - }) - - const receipt = await tx.wait() - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - assert.equal( - upkeepPerformedLogs.length, - 1, - `wrong 
log count for ${type} upkeep`, - ) - } - }) - - it('allows very old trigger block numbers when bypassing reorg protection with reorgProtectionEnabled config', async () => { - const newConfig = config - newConfig.reorgProtectionEnabled = false - await registry // used to test initial configurations - .connect(owner) - .setConfigTypeSafe( - signerAddresses, - keeperAddresses, - f, - newConfig, - offchainVersion, - offchainBytes, - ) - for (let i = 0; i < 256; i++) { - await ethers.provider.send('evm_mine', []) - } - const tests: [string, BigNumber][] = [ - ['conditional', upkeepId], - ['log-trigger', logUpkeepId], - ] - for (const [type, id] of tests) { - const latestBlock = await ethers.provider.getBlock('latest') - const old = await ethers.provider.getBlock(latestBlock.number - 256) - // Try to transmit a report which has incorrect checkBlockHash - const tx = await getTransmitTx(registry, keeper1, [id], { - checkBlockNum: old.number, - checkBlockHash: old.hash, - }) - - const receipt = await tx.wait() - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - assert.equal( - upkeepPerformedLogs.length, - 1, - `wrong log count for ${type} upkeep`, - ) - } - }) - - it('allows very old trigger block numbers when bypassing reorg protection with empty blockhash', async () => { - // mine enough blocks so that blockhash(1) is unavailable - for (let i = 0; i <= 256; i++) { - await ethers.provider.send('evm_mine', []) - } - const tests: [string, BigNumber][] = [ - ['conditional', upkeepId], - ['log-trigger', logUpkeepId], - ] - for (const [type, id] of tests) { - const tx = await getTransmitTx(registry, keeper1, [id], { - checkBlockNum: 1, - checkBlockHash: emptyBytes32, - }) - const receipt = await tx.wait() - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - assert.equal( - upkeepPerformedLogs.length, - 1, - `wrong log count for ${type} upkeep`, - ) - } - }) - - it('returns early when future block number is provided as trigger, irrespective of blockhash being present', async () => { - const tests: [string, BigNumber][] = [ - ['conditional', upkeepId], - ['log-trigger', logUpkeepId], - ] - for (const [type, id] of tests) { - const latestBlock = await ethers.provider.getBlock('latest') - - // Should fail when blockhash is empty - let tx = await getTransmitTx(registry, keeper1, [id], { - checkBlockNum: latestBlock.number + 100, - checkBlockHash: emptyBytes32, - }) - let receipt = await tx.wait() - let reorgedUpkeepReportLogs = parseReorgedUpkeepReportLogs(receipt) - // exactly 1 ReorgedUpkeepReportLogs log should be emitted - assert.equal( - reorgedUpkeepReportLogs.length, - 1, - `wrong log count for ${type} upkeep`, - ) - - // Should also fail when blockhash is not empty - tx = await getTransmitTx(registry, keeper1, [id], { - checkBlockNum: latestBlock.number + 100, - checkBlockHash: latestBlock.hash, - }) - receipt = await tx.wait() - reorgedUpkeepReportLogs = parseReorgedUpkeepReportLogs(receipt) - // exactly 1 ReorgedUpkeepReportLogs log should be emitted - assert.equal( - reorgedUpkeepReportLogs.length, - 1, - `wrong log count for ${type} upkeep`, - ) - } - }) - - it('returns early when future block number is provided as trigger, irrespective of reorgProtectionEnabled config', async () => { - const newConfig = config - newConfig.reorgProtectionEnabled = false - await registry // used to test initial configurations - .connect(owner) - .setConfigTypeSafe( - signerAddresses, - keeperAddresses, - f, - newConfig, - offchainVersion, - offchainBytes, - ) - const tests: [string, 
BigNumber][] = [ - ['conditional', upkeepId], - ['log-trigger', logUpkeepId], - ] - for (const [type, id] of tests) { - const latestBlock = await ethers.provider.getBlock('latest') - - // Should fail when blockhash is empty - let tx = await getTransmitTx(registry, keeper1, [id], { - checkBlockNum: latestBlock.number + 100, - checkBlockHash: emptyBytes32, - }) - let receipt = await tx.wait() - let reorgedUpkeepReportLogs = parseReorgedUpkeepReportLogs(receipt) - // exactly 1 ReorgedUpkeepReportLogs log should be emitted - assert.equal( - reorgedUpkeepReportLogs.length, - 1, - `wrong log count for ${type} upkeep`, - ) - - // Should also fail when blockhash is not empty - tx = await getTransmitTx(registry, keeper1, [id], { - checkBlockNum: latestBlock.number + 100, - checkBlockHash: latestBlock.hash, - }) - receipt = await tx.wait() - reorgedUpkeepReportLogs = parseReorgedUpkeepReportLogs(receipt) - // exactly 1 ReorgedUpkeepReportLogs log should be emitted - assert.equal( - reorgedUpkeepReportLogs.length, - 1, - `wrong log count for ${type} upkeep`, - ) - } - }) - - it('returns early when upkeep is cancelled and cancellation delay has gone', async () => { - const latestBlockReport = await makeLatestBlockReport([upkeepId]) - await registry.connect(admin).cancelUpkeep(upkeepId) - - for (let i = 0; i < cancellationDelay; i++) { - await ethers.provider.send('evm_mine', []) - } - - const tx = await getTransmitTxWithReport( - registry, - keeper1, - latestBlockReport, - ) - - const receipt = await tx.wait() - const cancelledUpkeepReportLogs = - parseCancelledUpkeepReportLogs(receipt) - // exactly 1 CancelledUpkeepReport log should be emitted - assert.equal(cancelledUpkeepReportLogs.length, 1) - }) - - it('does not revert if the target cannot execute', async () => { - await mock.setCanPerform(false) - const tx = await getTransmitTx(registry, keeper1, [upkeepId]) - - const receipt = await tx.wait() - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - // exactly 1 Upkeep Performed should be emitted - assert.equal(upkeepPerformedLogs.length, 1) - const upkeepPerformedLog = upkeepPerformedLogs[0] - - const success = upkeepPerformedLog.args.success - assert.equal(success, false) - }) - - it('does not revert if the target runs out of gas', async () => { - await mock.setCanPerform(false) - - const tx = await getTransmitTx(registry, keeper1, [upkeepId], { - performGas: 10, // too little gas - }) - - const receipt = await tx.wait() - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - // exactly 1 Upkeep Performed should be emitted - assert.equal(upkeepPerformedLogs.length, 1) - const upkeepPerformedLog = upkeepPerformedLogs[0] - - const success = upkeepPerformedLog.args.success - assert.equal(success, false) - }) - - it('reverts if not enough gas supplied', async () => { - await evmRevert( - getTransmitTx(registry, keeper1, [upkeepId], { - gasLimit: performGas, - }), - ) - }) - - it('executes the data passed to the registry', async () => { - await mock.setCanPerform(true) - - const tx = await getTransmitTx(registry, keeper1, [upkeepId], { - performDatas: [randomBytes], - }) - const receipt = await tx.wait() - - const upkeepPerformedWithABI = [ - 'event UpkeepPerformedWith(bytes upkeepData)', - ] - const iface = new ethers.utils.Interface(upkeepPerformedWithABI) - const parsedLogs = [] - for (let i = 0; i < receipt.logs.length; i++) { - const log = receipt.logs[i] - try { - parsedLogs.push(iface.parseLog(log)) - } catch (e) { - // ignore log - } - } - assert.equal(parsedLogs.length, 
1) - assert.equal(parsedLogs[0].args.upkeepData, randomBytes) - }) - - it('uses actual execution price for payment and premium calculation', async () => { - // Actual multiplier is 2, but we set gasPrice to be 1x gasWei - const gasPrice = gasWei.mul(BigNumber.from('1')) - await mock.setCanPerform(true) - const registryPremiumBefore = (await registry.getState()).state - .totalPremium - const tx = await getTransmitTx(registry, keeper1, [upkeepId], { - gasPrice, - }) - const receipt = await tx.wait() - const registryPremiumAfter = (await registry.getState()).state - .totalPremium - const premium = registryPremiumAfter.sub(registryPremiumBefore) - - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - // exactly 1 Upkeep Performed should be emitted - assert.equal(upkeepPerformedLogs.length, 1) - const upkeepPerformedLog = upkeepPerformedLogs[0] - - const gasUsed = upkeepPerformedLog.args.gasUsed - const gasOverhead = upkeepPerformedLog.args.gasOverhead - const totalPayment = upkeepPerformedLog.args.totalPayment - - assert.equal( - linkForGas( - gasUsed, - gasOverhead, - BigNumber.from('1'), // Not the config multiplier, but the actual gas used - paymentPremiumPPB, - flatFeeMicroLink, - ).total.toString(), - totalPayment.toString(), - ) - - assert.equal( - linkForGas( - gasUsed, - gasOverhead, - BigNumber.from('1'), // Not the config multiplier, but the actual gas used - paymentPremiumPPB, - flatFeeMicroLink, - ).premium.toString(), - premium.toString(), - ) - }) - - it('only pays at a rate up to the gas ceiling [ @skip-coverage ]', async () => { - // Actual multiplier is 2, but we set gasPrice to be 10x - const gasPrice = gasWei.mul(BigNumber.from('10')) - await mock.setCanPerform(true) - - const tx = await getTransmitTx(registry, keeper1, [upkeepId], { - gasPrice, - }) - const receipt = await tx.wait() - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - // exactly 1 Upkeep Performed should be emitted - assert.equal(upkeepPerformedLogs.length, 1) - const upkeepPerformedLog = upkeepPerformedLogs[0] - - const gasUsed = upkeepPerformedLog.args.gasUsed - const gasOverhead = upkeepPerformedLog.args.gasOverhead - const totalPayment = upkeepPerformedLog.args.totalPayment - - assert.equal( - linkForGas( - gasUsed, - gasOverhead, - gasCeilingMultiplier, // Should be same as existing multiplier - paymentPremiumPPB, - flatFeeMicroLink, - ).total.toString(), - totalPayment.toString(), - ) - }) - - it('correctly accounts for l1 payment', async () => { - await mock.setCanPerform(true) - // Same as MockArbGasInfo.sol - const l1CostWeiArb = BigNumber.from(1000000) - - let tx = await arbRegistry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), randomBytes, '0x') - const testUpkeepId = await getUpkeepID(tx) - await arbRegistry.connect(owner).addFunds(testUpkeepId, toWei('100')) - - // Do the thing - tx = await getTransmitTx( - arbRegistry, - keeper1, - [testUpkeepId], - - { gasPrice: gasWei.mul('5') }, // High gas price so that it gets capped - ) - const receipt = await tx.wait() - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - // exactly 1 Upkeep Performed should be emitted - assert.equal(upkeepPerformedLogs.length, 1) - const upkeepPerformedLog = upkeepPerformedLogs[0] - - const gasUsed = upkeepPerformedLog.args.gasUsed - const gasOverhead = upkeepPerformedLog.args.gasOverhead - const totalPayment = upkeepPerformedLog.args.totalPayment - - assert.equal( - linkForGas( - gasUsed, - 
gasOverhead, - gasCeilingMultiplier, - paymentPremiumPPB, - flatFeeMicroLink, - l1CostWeiArb, - ).total.toString(), - totalPayment.toString(), - ) - }) - - itMaybe('can self fund', async () => { - const maxPayment = await registry.getMaxPaymentForGas( - Trigger.CONDITION, - performGas, - ) - - // First set auto funding amount to 0 and verify that balance is deducted upon performUpkeep - let initialBalance = toWei('100') - await registry.connect(owner).addFunds(afUpkeepId, initialBalance) - await autoFunderUpkeep.setAutoFundLink(0) - await autoFunderUpkeep.setIsEligible(true) - await getTransmitTx(registry, keeper1, [afUpkeepId]) - - let postUpkeepBalance = (await registry.getUpkeep(afUpkeepId)).balance - assert.isTrue(postUpkeepBalance.lt(initialBalance)) // Balance should be deducted - assert.isTrue(postUpkeepBalance.gte(initialBalance.sub(maxPayment))) // Balance should not be deducted more than maxPayment - - // Now set auto funding amount to 100 wei and verify that the balance increases - initialBalance = postUpkeepBalance - const autoTopupAmount = toWei('100') - await autoFunderUpkeep.setAutoFundLink(autoTopupAmount) - await autoFunderUpkeep.setIsEligible(true) - await getTransmitTx(registry, keeper1, [afUpkeepId]) - - postUpkeepBalance = (await registry.getUpkeep(afUpkeepId)).balance - // Balance should increase by autoTopupAmount and decrease by max maxPayment - assert.isTrue( - postUpkeepBalance.gte( - initialBalance.add(autoTopupAmount).sub(maxPayment), - ), - ) - }) - - it('can self cancel', async () => { - await registry.connect(owner).addFunds(afUpkeepId, toWei('100')) - - await autoFunderUpkeep.setIsEligible(true) - await autoFunderUpkeep.setShouldCancel(true) - - let registration = await registry.getUpkeep(afUpkeepId) - const oldExpiration = registration.maxValidBlocknumber - - // Do the thing - await getTransmitTx(registry, keeper1, [afUpkeepId]) - - // Verify upkeep gets cancelled - registration = await registry.getUpkeep(afUpkeepId) - const newExpiration = registration.maxValidBlocknumber - assert.isTrue(newExpiration.lt(oldExpiration)) - }) - - it('reverts when configDigest mismatches', async () => { - const report = await makeLatestBlockReport([upkeepId]) - const reportContext = [emptyBytes32, epochAndRound5_1, emptyBytes32] // wrong config digest - const sigs = signReport(reportContext, report, signers.slice(0, f + 1)) - await evmRevertCustomError( - registry - .connect(keeper1) - .transmit( - [reportContext[0], reportContext[1], reportContext[2]], - report, - sigs.rs, - sigs.ss, - sigs.vs, - ), - registry, - 'ConfigDigestMismatch', - ) - }) - - it('reverts with incorrect number of signatures', async () => { - const configDigest = (await registry.getState()).state - .latestConfigDigest - const report = await makeLatestBlockReport([upkeepId]) - const reportContext = [configDigest, epochAndRound5_1, emptyBytes32] // wrong config digest - const sigs = signReport(reportContext, report, signers.slice(0, f + 2)) - await evmRevertCustomError( - registry - .connect(keeper1) - .transmit( - [reportContext[0], reportContext[1], reportContext[2]], - report, - sigs.rs, - sigs.ss, - sigs.vs, - ), - registry, - 'IncorrectNumberOfSignatures', - ) - }) - - it('reverts with invalid signature for inactive signers', async () => { - const configDigest = (await registry.getState()).state - .latestConfigDigest - const report = await makeLatestBlockReport([upkeepId]) - const reportContext = [configDigest, epochAndRound5_1, emptyBytes32] // wrong config digest - const sigs = 
signReport(reportContext, report, [ - new ethers.Wallet(ethers.Wallet.createRandom()), - new ethers.Wallet(ethers.Wallet.createRandom()), - ]) - await evmRevertCustomError( - registry - .connect(keeper1) - .transmit( - [reportContext[0], reportContext[1], reportContext[2]], - report, - sigs.rs, - sigs.ss, - sigs.vs, - ), - registry, - 'OnlyActiveSigners', - ) - }) - - it('reverts with invalid signature for duplicated signers', async () => { - const configDigest = (await registry.getState()).state - .latestConfigDigest - const report = await makeLatestBlockReport([upkeepId]) - const reportContext = [configDigest, epochAndRound5_1, emptyBytes32] // wrong config digest - const sigs = signReport(reportContext, report, [signer1, signer1]) - await evmRevertCustomError( - registry - .connect(keeper1) - .transmit( - [reportContext[0], reportContext[1], reportContext[2]], - report, - sigs.rs, - sigs.ss, - sigs.vs, - ), - registry, - 'DuplicateSigners', - ) - }) - - itMaybe( - 'has a large enough gas overhead to cover upkeep that use all its gas [ @skip-coverage ]', - async () => { - await registry.connect(owner).setConfigTypeSafe( - signerAddresses, - keeperAddresses, - 10, // maximise f to maximise overhead - config, - offchainVersion, - offchainBytes, - ) - const tx = await registry - .connect(owner) - ['registerUpkeep(address,uint32,address,bytes,bytes)']( - mock.address, - maxPerformGas, // max allowed gas - await admin.getAddress(), - randomBytes, - '0x', - ) - const testUpkeepId = await getUpkeepID(tx) - await registry.connect(admin).addFunds(testUpkeepId, toWei('100')) - - let performData = '0x' - for (let i = 0; i < maxPerformDataSize.toNumber(); i++) { - performData += '11' - } // max allowed performData - - await mock.setCanPerform(true) - await mock.setPerformGasToBurn(maxPerformGas) - - await getTransmitTx(registry, keeper1, [testUpkeepId], { - gasLimit: maxPerformGas.add(transmitGasOverhead), - numSigners: 11, - performDatas: [performData], - }) // Should not revert - }, - ) - - itMaybe( - 'performs upkeep, deducts payment, updates lastPerformed and emits events', - async () => { - await mock.setCanPerform(true) - - for (const i in fArray) { - const newF = fArray[i] - await registry - .connect(owner) - .setConfigTypeSafe( - signerAddresses, - keeperAddresses, - newF, - config, - offchainVersion, - offchainBytes, - ) - const checkBlock = await ethers.provider.getBlock('latest') - - const keeperBefore = await registry.getTransmitterInfo( - await keeper1.getAddress(), - ) - const registrationBefore = await registry.getUpkeep(upkeepId) - const registryPremiumBefore = (await registry.getState()).state - .totalPremium - const keeperLinkBefore = await linkToken.balanceOf( - await keeper1.getAddress(), - ) - const registryLinkBefore = await linkToken.balanceOf( - registry.address, - ) - - // Do the thing - const tx = await getTransmitTx(registry, keeper1, [upkeepId], { - checkBlockNum: checkBlock.number, - checkBlockHash: checkBlock.hash, - numSigners: newF + 1, - }) - - const receipt = await tx.wait() - - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - // exactly 1 Upkeep Performed should be emitted - assert.equal(upkeepPerformedLogs.length, 1) - const upkeepPerformedLog = upkeepPerformedLogs[0] - - const id = upkeepPerformedLog.args.id - const success = upkeepPerformedLog.args.success - const trigger = upkeepPerformedLog.args.trigger - const gasUsed = upkeepPerformedLog.args.gasUsed - const gasOverhead = upkeepPerformedLog.args.gasOverhead - const totalPayment = 
upkeepPerformedLog.args.totalPayment - assert.equal(id.toString(), upkeepId.toString()) - assert.equal(success, true) - assert.equal( - trigger, - encodeBlockTrigger({ - blockNum: checkBlock.number, - blockHash: checkBlock.hash, - }), - ) - assert.isTrue(gasUsed.gt(BigNumber.from('0'))) - assert.isTrue(gasOverhead.gt(BigNumber.from('0'))) - assert.isTrue(totalPayment.gt(BigNumber.from('0'))) - - const keeperAfter = await registry.getTransmitterInfo( - await keeper1.getAddress(), - ) - const registrationAfter = await registry.getUpkeep(upkeepId) - const keeperLinkAfter = await linkToken.balanceOf( - await keeper1.getAddress(), - ) - const registryLinkAfter = await linkToken.balanceOf( - registry.address, - ) - const registryPremiumAfter = (await registry.getState()).state - .totalPremium - const premium = registryPremiumAfter.sub(registryPremiumBefore) - // Keeper payment is gasPayment + premium / num keepers - const keeperPayment = totalPayment - .sub(premium) - .add(premium.div(BigNumber.from(keeperAddresses.length))) - - assert.equal( - keeperAfter.balance.sub(keeperPayment).toString(), - keeperBefore.balance.toString(), - ) - assert.equal( - registrationBefore.balance.sub(totalPayment).toString(), - registrationAfter.balance.toString(), - ) - assert.isTrue(keeperLinkAfter.eq(keeperLinkBefore)) - assert.isTrue(registryLinkBefore.eq(registryLinkAfter)) - - // Amount spent should be updated correctly - assert.equal( - registrationAfter.amountSpent.sub(totalPayment).toString(), - registrationBefore.amountSpent.toString(), - ) - assert.isTrue( - registrationAfter.amountSpent - .sub(registrationBefore.amountSpent) - .eq(registrationBefore.balance.sub(registrationAfter.balance)), - ) - // Last perform block number should be updated - assert.equal( - registrationAfter.lastPerformedBlockNumber.toString(), - tx.blockNumber?.toString(), - ) - - // Latest epoch should be 5 - assert.equal((await registry.getState()).state.latestEpoch, 5) - } - }, - ) - - // skipping it for now as it is passing in local but failing in CI - describe.skip('Gas benchmarking conditional upkeeps [ @skip-coverage ]', function () { - const fs = [1, 10] - fs.forEach(function (newF) { - it( - 'When f=' + - newF + - ' calculates gas overhead appropriately within a margin for different scenarios', - async () => { - // Perform the upkeep once to remove non-zero storage slots and have predictable gas measurement - let tx = await getTransmitTx(registry, keeper1, [upkeepId]) - await tx.wait() - - // Different test scenarios - let longBytes = '0x' - for (let i = 0; i < maxPerformDataSize.toNumber(); i++) { - longBytes += '11' - } - const upkeepSuccessArray = [true, false] - const performGasArray = [5000, performGas] - const performDataArray = ['0x', longBytes] - const chainModuleOverheads = - await chainModuleBase.getGasOverhead() - - for (const i in upkeepSuccessArray) { - for (const j in performGasArray) { - for (const k in performDataArray) { - const upkeepSuccess = upkeepSuccessArray[i] - const performGas = performGasArray[j] - const performData = performDataArray[k] - - await mock.setCanPerform(upkeepSuccess) - await mock.setPerformGasToBurn(performGas) - await registry - .connect(owner) - .setConfigTypeSafe( - signerAddresses, - keeperAddresses, - newF, - config, - offchainVersion, - offchainBytes, - ) - tx = await getTransmitTx(registry, keeper1, [upkeepId], { - numSigners: newF + 1, - performDatas: [performData], - }) - const receipt = await tx.wait() - const upkeepPerformedLogs = - parseUpkeepPerformedLogs(receipt) - // 
exactly 1 Upkeep Performed should be emitted - assert.equal(upkeepPerformedLogs.length, 1) - const upkeepPerformedLog = upkeepPerformedLogs[0] - - const upkeepGasUsed = upkeepPerformedLog.args.gasUsed - const chargedGasOverhead = - upkeepPerformedLog.args.gasOverhead - const actualGasOverhead = receipt.gasUsed.sub(upkeepGasUsed) - const estimatedGasOverhead = registryConditionalOverhead - .add( - registryPerSignerGasOverhead.mul( - BigNumber.from(newF + 1), - ), - ) - .add( - registryPerPerformByteGasOverhead - .add(chainModuleOverheads.chainModulePerByteOverhead) - .mul( - BigNumber.from(performData.length / 2 - 1) - .add(registryTransmitCalldataFixedBytesOverhead) - .add( - registryTransmitCalldataPerSignerBytesOverhead.mul( - BigNumber.from(newF + 1), - ), - ), - ), - ) - .add(chainModuleOverheads.chainModuleFixedOverhead) - - assert.isTrue(upkeepGasUsed.gt(BigNumber.from('0'))) - assert.isTrue(chargedGasOverhead.gt(BigNumber.from('0'))) - assert.isTrue(actualGasOverhead.gt(BigNumber.from('0'))) - - console.log( - 'Gas Benchmarking conditional upkeeps:', - 'upkeepSuccess=', - upkeepSuccess, - 'performGas=', - performGas.toString(), - 'performData length=', - performData.length / 2 - 1, - 'sig verification ( f =', - newF, - '): estimated overhead: ', - estimatedGasOverhead.toString(), - ' charged overhead: ', - chargedGasOverhead.toString(), - ' actual overhead: ', - actualGasOverhead.toString(), - ' calculation margin over gasUsed: ', - chargedGasOverhead.sub(actualGasOverhead).toString(), - ' estimation margin over gasUsed: ', - estimatedGasOverhead.sub(actualGasOverhead).toString(), - ) - - // The actual gas overhead should be less than charged gas overhead, but not by a lot - // The charged gas overhead is controlled by ACCOUNTING_FIXED_GAS_OVERHEAD and - // ACCOUNTING_PER_UPKEEP_GAS_OVERHEAD, and their correct values should be set to - // satisfy constraints in multiple places - assert.isTrue( - chargedGasOverhead.gt(actualGasOverhead), - 'Gas overhead calculated is too low, increase account gas variables (ACCOUNTING_FIXED_GAS_OVERHEAD/ACCOUNTING_PER_UPKEEP_GAS_OVERHEAD) by at least ' + - actualGasOverhead.sub(chargedGasOverhead).toString(), - ) - assert.isTrue( - chargedGasOverhead - .sub(actualGasOverhead) - .lt(gasCalculationMargin), - 'Gas overhead calculated is too high, decrease account gas variables (ACCOUNTING_FIXED_GAS_OVERHEAD/ACCOUNTING_PER_SIGNER_GAS_OVERHEAD) by at least ' + - chargedGasOverhead - .sub(actualGasOverhead) - .sub(gasCalculationMargin) - .toString(), - ) - - // The estimated overhead during checkUpkeep should be close to the actual overhead in transaction - // It should be greater than the actual overhead but not by a lot - // The estimated overhead is controlled by variables - // REGISTRY_CONDITIONAL_OVERHEAD, REGISTRY_LOG_OVERHEAD, REGISTRY_PER_SIGNER_GAS_OVERHEAD - // REGISTRY_PER_PERFORM_BYTE_GAS_OVERHEAD - assert.isTrue( - estimatedGasOverhead.gt(actualGasOverhead), - 'Gas overhead estimated in check upkeep is too low, increase estimation gas variables (REGISTRY_CONDITIONAL_OVERHEAD/REGISTRY_LOG_OVERHEAD/REGISTRY_PER_SIGNER_GAS_OVERHEAD/REGISTRY_PER_PERFORM_BYTE_GAS_OVERHEAD) by at least ' + - estimatedGasOverhead.sub(chargedGasOverhead).toString(), - ) - assert.isTrue( - estimatedGasOverhead - .sub(actualGasOverhead) - .lt(gasEstimationMargin), - 'Gas overhead estimated is too high, decrease estimation gas variables (REGISTRY_CONDITIONAL_OVERHEAD/REGISTRY_LOG_OVERHEAD/REGISTRY_PER_SIGNER_GAS_OVERHEAD/REGISTRY_PER_PERFORM_BYTE_GAS_OVERHEAD) by at 
least ' + - estimatedGasOverhead - .sub(actualGasOverhead) - .sub(gasEstimationMargin) - .toString(), - ) - } - } - } - }, - ) - }) - }) - - describe('Gas benchmarking log upkeeps [ @skip-coverage ]', function () { - const fs = [1, 10] - fs.forEach(function (newF) { - it( - 'When f=' + - newF + - ' calculates gas overhead appropriately within a margin', - async () => { - // Perform the upkeep once to remove non-zero storage slots and have predictable gas measurement - let tx = await getTransmitTx(registry, keeper1, [logUpkeepId]) - await tx.wait() - const performData = '0x' - await mock.setCanPerform(true) - await mock.setPerformGasToBurn(performGas) - await registry.setConfigTypeSafe( - signerAddresses, - keeperAddresses, - newF, - config, - offchainVersion, - offchainBytes, - ) - tx = await getTransmitTx(registry, keeper1, [logUpkeepId], { - numSigners: newF + 1, - performDatas: [performData], - }) - const receipt = await tx.wait() - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - // exactly 1 Upkeep Performed should be emitted - assert.equal(upkeepPerformedLogs.length, 1) - const upkeepPerformedLog = upkeepPerformedLogs[0] - const chainModuleOverheads = - await chainModuleBase.getGasOverhead() - - const upkeepGasUsed = upkeepPerformedLog.args.gasUsed - const chargedGasOverhead = upkeepPerformedLog.args.gasOverhead - const actualGasOverhead = receipt.gasUsed.sub(upkeepGasUsed) - const estimatedGasOverhead = registryLogOverhead - .add(registryPerSignerGasOverhead.mul(BigNumber.from(newF + 1))) - .add( - registryPerPerformByteGasOverhead - .add(chainModuleOverheads.chainModulePerByteOverhead) - .mul( - BigNumber.from(performData.length / 2 - 1) - .add(registryTransmitCalldataFixedBytesOverhead) - .add( - registryTransmitCalldataPerSignerBytesOverhead.mul( - BigNumber.from(newF + 1), - ), - ), - ), - ) - .add(chainModuleOverheads.chainModuleFixedOverhead) - - assert.isTrue(upkeepGasUsed.gt(BigNumber.from('0'))) - assert.isTrue(chargedGasOverhead.gt(BigNumber.from('0'))) - assert.isTrue(actualGasOverhead.gt(BigNumber.from('0'))) - - console.log( - 'Gas Benchmarking log upkeeps:', - 'upkeepSuccess=', - true, - 'performGas=', - performGas.toString(), - 'performData length=', - performData.length / 2 - 1, - 'sig verification ( f =', - newF, - '): estimated overhead: ', - estimatedGasOverhead.toString(), - ' charged overhead: ', - chargedGasOverhead.toString(), - ' actual overhead: ', - actualGasOverhead.toString(), - ' calculation margin over gasUsed: ', - chargedGasOverhead.sub(actualGasOverhead).toString(), - ' estimation margin over gasUsed: ', - estimatedGasOverhead.sub(actualGasOverhead).toString(), - ) - - assert.isTrue( - chargedGasOverhead.gt(actualGasOverhead), - 'Gas overhead calculated is too low, increase account gas variables (ACCOUNTING_FIXED_GAS_OVERHEAD/ACCOUNTING_PER_UPKEEP_GAS_OVERHEAD) by at least ' + - actualGasOverhead.sub(chargedGasOverhead).toString(), - ) - assert.isTrue( - chargedGasOverhead - .sub(actualGasOverhead) - .lt(gasCalculationMargin), - 'Gas overhead calculated is too high, decrease account gas variables (ACCOUNTING_FIXED_GAS_OVERHEAD/ACCOUNTING_PER_SIGNER_GAS_OVERHEAD) by at least ' + - chargedGasOverhead - .sub(actualGasOverhead) - .sub(gasCalculationMargin) - .toString(), - ) - - assert.isTrue( - estimatedGasOverhead.gt(actualGasOverhead), - 'Gas overhead estimated in check upkeep is too low, increase estimation gas variables 
(REGISTRY_CONDITIONAL_OVERHEAD/REGISTRY_LOG_OVERHEAD/REGISTRY_PER_SIGNER_GAS_OVERHEAD/REGISTRY_PER_PERFORM_BYTE_GAS_OVERHEAD) by at least ' + - estimatedGasOverhead.sub(chargedGasOverhead).toString(), - ) - assert.isTrue( - estimatedGasOverhead - .sub(actualGasOverhead) - .lt(gasEstimationMargin), - 'Gas overhead estimated is too high, decrease estimation gas variables (REGISTRY_CONDITIONAL_OVERHEAD/REGISTRY_LOG_OVERHEAD/REGISTRY_PER_SIGNER_GAS_OVERHEAD/REGISTRY_PER_PERFORM_BYTE_GAS_OVERHEAD) by at least ' + - estimatedGasOverhead - .sub(actualGasOverhead) - .sub(gasEstimationMargin) - .toString(), - ) - }, - ) - }) - }) - }) - }) - - describe('#transmit with upkeep batches [ @skip-coverage ]', function () { - const numPassingConditionalUpkeepsArray = [0, 1, 5] - const numPassingLogUpkeepsArray = [0, 1, 5] - const numFailingUpkeepsArray = [0, 3] - - for (let idx = 0; idx < numPassingConditionalUpkeepsArray.length; idx++) { - for (let jdx = 0; jdx < numPassingLogUpkeepsArray.length; jdx++) { - for (let kdx = 0; kdx < numFailingUpkeepsArray.length; kdx++) { - const numPassingConditionalUpkeeps = - numPassingConditionalUpkeepsArray[idx] - const numPassingLogUpkeeps = numPassingLogUpkeepsArray[jdx] - const numFailingUpkeeps = numFailingUpkeepsArray[kdx] - if (numPassingConditionalUpkeeps == 0 && numPassingLogUpkeeps == 0) { - continue - } - it( - '[Conditional:' + - numPassingConditionalUpkeeps + - ',Log:' + - numPassingLogUpkeeps + - ',Failures:' + - numFailingUpkeeps + - '] performs successful upkeeps and does not charge failing upkeeps', - async () => { - const allUpkeeps = await getMultipleUpkeepsDeployedAndFunded( - numPassingConditionalUpkeeps, - numPassingLogUpkeeps, - numFailingUpkeeps, - ) - const passingConditionalUpkeepIds = - allUpkeeps.passingConditionalUpkeepIds - const passingLogUpkeepIds = allUpkeeps.passingLogUpkeepIds - const failingUpkeepIds = allUpkeeps.failingUpkeepIds - - const keeperBefore = await registry.getTransmitterInfo( - await keeper1.getAddress(), - ) - const keeperLinkBefore = await linkToken.balanceOf( - await keeper1.getAddress(), - ) - const registryLinkBefore = await linkToken.balanceOf( - registry.address, - ) - const registryPremiumBefore = (await registry.getState()).state - .totalPremium - const registrationConditionalPassingBefore = await Promise.all( - passingConditionalUpkeepIds.map(async (id) => { - const reg = await registry.getUpkeep(BigNumber.from(id)) - assert.equal(reg.lastPerformedBlockNumber.toString(), '0') - return reg - }), - ) - const registrationLogPassingBefore = await Promise.all( - passingLogUpkeepIds.map(async (id) => { - const reg = await registry.getUpkeep(BigNumber.from(id)) - assert.equal(reg.lastPerformedBlockNumber.toString(), '0') - return reg - }), - ) - const registrationFailingBefore = await Promise.all( - failingUpkeepIds.map(async (id) => { - const reg = await registry.getUpkeep(BigNumber.from(id)) - assert.equal(reg.lastPerformedBlockNumber.toString(), '0') - return reg - }), - ) - - // cancel upkeeps so they will fail in the transmit process - // must call the cancel upkeep as the owner to avoid the CANCELLATION_DELAY - for (let ldx = 0; ldx < failingUpkeepIds.length; ldx++) { - await registry - .connect(owner) - .cancelUpkeep(failingUpkeepIds[ldx]) - } - - const tx = await getTransmitTx( - registry, - keeper1, - passingConditionalUpkeepIds.concat( - passingLogUpkeepIds.concat(failingUpkeepIds), - ), - ) - - const receipt = await tx.wait() - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - // exactly 
numPassingUpkeeps Upkeep Performed should be emitted - assert.equal( - upkeepPerformedLogs.length, - numPassingConditionalUpkeeps + numPassingLogUpkeeps, - ) - const cancelledUpkeepReportLogs = - parseCancelledUpkeepReportLogs(receipt) - // exactly numFailingUpkeeps Upkeep Performed should be emitted - assert.equal(cancelledUpkeepReportLogs.length, numFailingUpkeeps) - - const keeperAfter = await registry.getTransmitterInfo( - await keeper1.getAddress(), - ) - const keeperLinkAfter = await linkToken.balanceOf( - await keeper1.getAddress(), - ) - const registryLinkAfter = await linkToken.balanceOf( - registry.address, - ) - const registrationConditionalPassingAfter = await Promise.all( - passingConditionalUpkeepIds.map(async (id) => { - return await registry.getUpkeep(BigNumber.from(id)) - }), - ) - const registrationLogPassingAfter = await Promise.all( - passingLogUpkeepIds.map(async (id) => { - return await registry.getUpkeep(BigNumber.from(id)) - }), - ) - const registrationFailingAfter = await Promise.all( - failingUpkeepIds.map(async (id) => { - return await registry.getUpkeep(BigNumber.from(id)) - }), - ) - const registryPremiumAfter = (await registry.getState()).state - .totalPremium - const premium = registryPremiumAfter.sub(registryPremiumBefore) - - let netPayment = BigNumber.from('0') - for (let i = 0; i < numPassingConditionalUpkeeps; i++) { - const id = upkeepPerformedLogs[i].args.id - const gasUsed = upkeepPerformedLogs[i].args.gasUsed - const gasOverhead = upkeepPerformedLogs[i].args.gasOverhead - const totalPayment = upkeepPerformedLogs[i].args.totalPayment - - expect(id).to.equal(passingConditionalUpkeepIds[i]) - assert.isTrue(gasUsed.gt(BigNumber.from('0'))) - assert.isTrue(gasOverhead.gt(BigNumber.from('0'))) - assert.isTrue(totalPayment.gt(BigNumber.from('0'))) - - // Balance should be deducted - assert.equal( - registrationConditionalPassingBefore[i].balance - .sub(totalPayment) - .toString(), - registrationConditionalPassingAfter[i].balance.toString(), - ) - - // Amount spent should be updated correctly - assert.equal( - registrationConditionalPassingAfter[i].amountSpent - .sub(totalPayment) - .toString(), - registrationConditionalPassingBefore[ - i - ].amountSpent.toString(), - ) - - // Last perform block number should be updated - assert.equal( - registrationConditionalPassingAfter[ - i - ].lastPerformedBlockNumber.toString(), - tx.blockNumber?.toString(), - ) - - netPayment = netPayment.add(totalPayment) - } - - for (let i = 0; i < numPassingLogUpkeeps; i++) { - const id = - upkeepPerformedLogs[numPassingConditionalUpkeeps + i].args.id - const gasUsed = - upkeepPerformedLogs[numPassingConditionalUpkeeps + i].args - .gasUsed - const gasOverhead = - upkeepPerformedLogs[numPassingConditionalUpkeeps + i].args - .gasOverhead - const totalPayment = - upkeepPerformedLogs[numPassingConditionalUpkeeps + i].args - .totalPayment - - expect(id).to.equal(passingLogUpkeepIds[i]) - assert.isTrue(gasUsed.gt(BigNumber.from('0'))) - assert.isTrue(gasOverhead.gt(BigNumber.from('0'))) - assert.isTrue(totalPayment.gt(BigNumber.from('0'))) - - // Balance should be deducted - assert.equal( - registrationLogPassingBefore[i].balance - .sub(totalPayment) - .toString(), - registrationLogPassingAfter[i].balance.toString(), - ) - - // Amount spent should be updated correctly - assert.equal( - registrationLogPassingAfter[i].amountSpent - .sub(totalPayment) - .toString(), - registrationLogPassingBefore[i].amountSpent.toString(), - ) - - // Last perform block number should not be updated for log 
triggers - assert.equal( - registrationLogPassingAfter[ - i - ].lastPerformedBlockNumber.toString(), - '0', - ) - - netPayment = netPayment.add(totalPayment) - } - - for (let i = 0; i < numFailingUpkeeps; i++) { - // CancelledUpkeep log should be emitted - const id = cancelledUpkeepReportLogs[i].args.id - expect(id).to.equal(failingUpkeepIds[i]) - - // Balance and amount spent should be same - assert.equal( - registrationFailingBefore[i].balance.toString(), - registrationFailingAfter[i].balance.toString(), - ) - assert.equal( - registrationFailingBefore[i].amountSpent.toString(), - registrationFailingAfter[i].amountSpent.toString(), - ) - - // Last perform block number should not be updated - assert.equal( - registrationFailingAfter[ - i - ].lastPerformedBlockNumber.toString(), - '0', - ) - } - - // Keeper payment is gasPayment + premium / num keepers - const keeperPayment = netPayment - .sub(premium) - .add(premium.div(BigNumber.from(keeperAddresses.length))) - - // Keeper should be paid net payment for all passed upkeeps - assert.equal( - keeperAfter.balance.sub(keeperPayment).toString(), - keeperBefore.balance.toString(), - ) - - assert.isTrue(keeperLinkAfter.eq(keeperLinkBefore)) - assert.isTrue(registryLinkBefore.eq(registryLinkAfter)) - }, - ) - - it( - '[Conditional:' + - numPassingConditionalUpkeeps + - ',Log' + - numPassingLogUpkeeps + - ',Failures:' + - numFailingUpkeeps + - '] splits gas overhead appropriately among performed upkeeps [ @skip-coverage ]', - async () => { - const allUpkeeps = await getMultipleUpkeepsDeployedAndFunded( - numPassingConditionalUpkeeps, - numPassingLogUpkeeps, - numFailingUpkeeps, - ) - const passingConditionalUpkeepIds = - allUpkeeps.passingConditionalUpkeepIds - const passingLogUpkeepIds = allUpkeeps.passingLogUpkeepIds - const failingUpkeepIds = allUpkeeps.failingUpkeepIds - - // Perform the upkeeps once to remove non-zero storage slots and have predictable gas measurement - let tx = await getTransmitTx( - registry, - keeper1, - passingConditionalUpkeepIds.concat( - passingLogUpkeepIds.concat(failingUpkeepIds), - ), - ) - - await tx.wait() - - // cancel upkeeps so they will fail in the transmit process - // must call the cancel upkeep as the owner to avoid the CANCELLATION_DELAY - for (let ldx = 0; ldx < failingUpkeepIds.length; ldx++) { - await registry - .connect(owner) - .cancelUpkeep(failingUpkeepIds[ldx]) - } - - // Do the actual thing - - tx = await getTransmitTx( - registry, - keeper1, - passingConditionalUpkeepIds.concat( - passingLogUpkeepIds.concat(failingUpkeepIds), - ), - ) - - const receipt = await tx.wait() - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - // exactly numPassingUpkeeps Upkeep Performed should be emitted - assert.equal( - upkeepPerformedLogs.length, - numPassingConditionalUpkeeps + numPassingLogUpkeeps, - ) - - let netGasUsedPlusChargedOverhead = BigNumber.from('0') - for (let i = 0; i < numPassingConditionalUpkeeps; i++) { - const gasUsed = upkeepPerformedLogs[i].args.gasUsed - const chargedGasOverhead = - upkeepPerformedLogs[i].args.gasOverhead - - assert.isTrue(gasUsed.gt(BigNumber.from('0'))) - assert.isTrue(chargedGasOverhead.gt(BigNumber.from('0'))) - - // Overhead should be same for every upkeep - assert.isTrue( - chargedGasOverhead.eq( - upkeepPerformedLogs[0].args.gasOverhead, - ), - ) - netGasUsedPlusChargedOverhead = netGasUsedPlusChargedOverhead - .add(gasUsed) - .add(chargedGasOverhead) - } - - for (let i = 0; i < numPassingLogUpkeeps; i++) { - const gasUsed = - 
upkeepPerformedLogs[numPassingConditionalUpkeeps + i].args - .gasUsed - const chargedGasOverhead = - upkeepPerformedLogs[numPassingConditionalUpkeeps + i].args - .gasOverhead - - assert.isTrue(gasUsed.gt(BigNumber.from('0'))) - assert.isTrue(chargedGasOverhead.gt(BigNumber.from('0'))) - - // Overhead should be same for every upkeep - assert.isTrue( - chargedGasOverhead.eq( - upkeepPerformedLogs[numPassingConditionalUpkeeps].args - .gasOverhead, - ), - ) - netGasUsedPlusChargedOverhead = netGasUsedPlusChargedOverhead - .add(gasUsed) - .add(chargedGasOverhead) - } - - console.log( - 'Gas Benchmarking - batching (passedConditionalUpkeeps: ', - numPassingConditionalUpkeeps, - 'passedLogUpkeeps:', - numPassingLogUpkeeps, - 'failedUpkeeps:', - numFailingUpkeeps, - '): ', - numPassingConditionalUpkeeps > 0 - ? 'charged conditional overhead' - : '', - numPassingConditionalUpkeeps > 0 - ? upkeepPerformedLogs[0].args.gasOverhead.toString() - : '', - numPassingLogUpkeeps > 0 ? 'charged log overhead' : '', - numPassingLogUpkeeps > 0 - ? upkeepPerformedLogs[ - numPassingConditionalUpkeeps - ].args.gasOverhead.toString() - : '', - ' margin over gasUsed', - netGasUsedPlusChargedOverhead.sub(receipt.gasUsed).toString(), - ) - - // The total gas charged should be greater than tx gas - assert.isTrue( - netGasUsedPlusChargedOverhead.gt(receipt.gasUsed), - 'Charged gas overhead is too low for batch upkeeps, increase ACCOUNTING_PER_UPKEEP_GAS_OVERHEAD', - ) - }, - ) - } - } - } - - it('has enough perform gas overhead for large batches [ @skip-coverage ]', async () => { - const numUpkeeps = 20 - const upkeepIds: BigNumber[] = [] - let totalPerformGas = BigNumber.from('0') - for (let i = 0; i < numUpkeeps; i++) { - const mock = await upkeepMockFactory.deploy() - const tx = await registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), randomBytes, '0x') - const testUpkeepId = await getUpkeepID(tx) - upkeepIds.push(testUpkeepId) - - // Add funds to passing upkeeps - await registry.connect(owner).addFunds(testUpkeepId, toWei('10')) - - await mock.setCanPerform(true) - await mock.setPerformGasToBurn(performGas) - - totalPerformGas = totalPerformGas.add(performGas) - } - - // Should revert with no overhead added - await evmRevert( - getTransmitTx(registry, keeper1, upkeepIds, { - gasLimit: totalPerformGas, - }), - ) - // Should not revert with overhead added - await getTransmitTx(registry, keeper1, upkeepIds, { - gasLimit: totalPerformGas.add(transmitGasOverhead), - }) - }) - - it('splits l2 payment among performed upkeeps according to perform data weight', async () => { - const numUpkeeps = 7 - const upkeepIds: BigNumber[] = [] - const performDataSizes = [0, 10, 1000, 50, 33, 69, 420] - const performDatas: string[] = [] - const upkeepCalldataWeights: BigNumber[] = [] - let totalCalldataWeight = BigNumber.from('0') - // Same as MockArbGasInfo.sol - const l1CostWeiArb = BigNumber.from(1000000) - - for (let i = 0; i < numUpkeeps; i++) { - const mock = await upkeepMockFactory.deploy() - const tx = await arbRegistry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), randomBytes, '0x') - const testUpkeepId = await getUpkeepID(tx) - upkeepIds.push(testUpkeepId) - - // Add funds to passing upkeeps - await arbRegistry.connect(owner).addFunds(testUpkeepId, toWei('100')) - - // Generate performData - let pd = '0x' - for (let j = 0; j < performDataSizes[i]; j++) 
{ - pd += '11' - } - performDatas.push(pd) - const w = BigNumber.from(performDataSizes[i]) - .add(registryTransmitCalldataFixedBytesOverhead) - .add( - registryTransmitCalldataPerSignerBytesOverhead.mul( - BigNumber.from(f + 1), - ), - ) - upkeepCalldataWeights.push(w) - totalCalldataWeight = totalCalldataWeight.add(w) - } - - // Do the thing - const tx = await getTransmitTx(arbRegistry, keeper1, upkeepIds, { - gasPrice: gasWei.mul('5'), // High gas price so that it gets capped - performDatas, - }) - - const receipt = await tx.wait() - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - // exactly numPassingUpkeeps Upkeep Performed should be emitted - assert.equal(upkeepPerformedLogs.length, numUpkeeps) - - for (let i = 0; i < numUpkeeps; i++) { - const upkeepPerformedLog = upkeepPerformedLogs[i] - - const gasUsed = upkeepPerformedLog.args.gasUsed - const gasOverhead = upkeepPerformedLog.args.gasOverhead - const totalPayment = upkeepPerformedLog.args.totalPayment - - assert.equal( - linkForGas( - gasUsed, - gasOverhead, - gasCeilingMultiplier, - paymentPremiumPPB, - flatFeeMicroLink, - l1CostWeiArb.mul(upkeepCalldataWeights[i]).div(totalCalldataWeight), - ).total.toString(), - totalPayment.toString(), - ) - } - }) - }) - - describe('#recoverFunds', () => { - const sent = toWei('7') - - beforeEach(async () => { - await linkToken.connect(admin).approve(registry.address, toWei('100')) - await linkToken - .connect(owner) - .transfer(await keeper1.getAddress(), toWei('1000')) - - // add funds to upkeep 1 and perform and withdraw some payment - const tx = await registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), emptyBytes, emptyBytes) - - const id1 = await getUpkeepID(tx) - await registry.connect(admin).addFunds(id1, toWei('5')) - - await getTransmitTx(registry, keeper1, [id1]) - await getTransmitTx(registry, keeper2, [id1]) - await getTransmitTx(registry, keeper3, [id1]) - - await registry - .connect(payee1) - .withdrawPayment( - await keeper1.getAddress(), - await nonkeeper.getAddress(), - ) - - // transfer funds directly to the registry - await linkToken.connect(keeper1).transfer(registry.address, sent) - - // add funds to upkeep 2 and perform and withdraw some payment - const tx2 = await registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), emptyBytes, emptyBytes) - const id2 = await getUpkeepID(tx2) - await registry.connect(admin).addFunds(id2, toWei('5')) - - await getTransmitTx(registry, keeper1, [id2]) - await getTransmitTx(registry, keeper2, [id2]) - await getTransmitTx(registry, keeper3, [id2]) - - await registry - .connect(payee2) - .withdrawPayment( - await keeper2.getAddress(), - await nonkeeper.getAddress(), - ) - - // transfer funds using onTokenTransfer - const data = ethers.utils.defaultAbiCoder.encode(['uint256'], [id2]) - await linkToken - .connect(owner) - .transferAndCall(registry.address, toWei('1'), data) - - // withdraw some funds - await registry.connect(owner).cancelUpkeep(id1) - await registry - .connect(admin) - .withdrawFunds(id1, await nonkeeper.getAddress()) - }) - - it('reverts if not called by owner', async () => { - await evmRevert( - registry.connect(keeper1).recoverFunds(), - 'Only callable by owner', - ) - }) - - it('allows any funds that have been accidentally transferred to be moved', async () => { - const balanceBefore = await linkToken.balanceOf(registry.address) - 
const ownerBefore = await linkToken.balanceOf(await owner.getAddress()) - - await registry.connect(owner).recoverFunds() - - const balanceAfter = await linkToken.balanceOf(registry.address) - const ownerAfter = await linkToken.balanceOf(await owner.getAddress()) - - assert.isTrue(balanceBefore.eq(balanceAfter.add(sent))) - assert.isTrue(ownerAfter.eq(ownerBefore.add(sent))) - }) - }) - - describe('#getMinBalanceForUpkeep / #checkUpkeep / #transmit', () => { - it('calculates the minimum balance appropriately', async () => { - await mock.setCanCheck(true) - - const oneWei = BigNumber.from(1) - const minBalance = await registry.getMinBalanceForUpkeep(upkeepId) - const tooLow = minBalance.sub(oneWei) - - await registry.connect(admin).addFunds(upkeepId, tooLow) - let checkUpkeepResult = await registry - .connect(zeroAddress) - .callStatic['checkUpkeep(uint256)'](upkeepId) - - assert.equal(checkUpkeepResult.upkeepNeeded, false) - assert.equal( - checkUpkeepResult.upkeepFailureReason, - UpkeepFailureReason.INSUFFICIENT_BALANCE, - ) - - await registry.connect(admin).addFunds(upkeepId, oneWei) - checkUpkeepResult = await registry - .connect(zeroAddress) - .callStatic['checkUpkeep(uint256)'](upkeepId) - assert.equal(checkUpkeepResult.upkeepNeeded, true) - }) - - it('uses maxPerformData size in checkUpkeep but actual performDataSize in transmit', async () => { - const tx = await registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), randomBytes, '0x') - const upkeepID = await getUpkeepID(tx) - await mock.setCanCheck(true) - await mock.setCanPerform(true) - - // upkeep is underfunded by 1 wei - const minBalance1 = (await registry.getMinBalanceForUpkeep(upkeepID)).sub( - 1, - ) - await registry.connect(owner).addFunds(upkeepID, minBalance1) - - // upkeep check should return false, 2 should return true - const checkUpkeepResult = await registry - .connect(zeroAddress) - .callStatic['checkUpkeep(uint256)'](upkeepID) - assert.equal(checkUpkeepResult.upkeepNeeded, false) - assert.equal( - checkUpkeepResult.upkeepFailureReason, - UpkeepFailureReason.INSUFFICIENT_BALANCE, - ) - - // however upkeep should perform and pay all the remaining balance - let maxPerformData = '0x' - for (let i = 0; i < maxPerformDataSize.toNumber(); i++) { - maxPerformData += '11' - } - - const tx2 = await getTransmitTx(registry, keeper1, [upkeepID], { - gasPrice: gasWei.mul(gasCeilingMultiplier), - performDatas: [maxPerformData], - }) - - const receipt = await tx2.wait() - const upkeepPerformedLogs = parseUpkeepPerformedLogs(receipt) - assert.equal(upkeepPerformedLogs.length, 1) - }) - }) - - describe('#withdrawFunds', () => { - let upkeepId2: BigNumber - - beforeEach(async () => { - const tx = await registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), randomBytes, '0x') - upkeepId2 = await getUpkeepID(tx) - - await registry.connect(admin).addFunds(upkeepId, toWei('100')) - await registry.connect(admin).addFunds(upkeepId2, toWei('100')) - - // Do a perform so that upkeep is charged some amount - await getTransmitTx(registry, keeper1, [upkeepId]) - await getTransmitTx(registry, keeper1, [upkeepId2]) - }) - - it('reverts if called on a non existing ID', async () => { - await evmRevertCustomError( - registry - .connect(admin) - .withdrawFunds(upkeepId.add(1), await payee1.getAddress()), - registry, - 'OnlyCallableByAdmin', - ) - }) - - it('reverts if called by 
anyone but the admin', async () => { - await evmRevertCustomError( - registry - .connect(owner) - .withdrawFunds(upkeepId, await payee1.getAddress()), - registry, - 'OnlyCallableByAdmin', - ) - }) - - it('reverts if called on an uncanceled upkeep', async () => { - await evmRevertCustomError( - registry - .connect(admin) - .withdrawFunds(upkeepId, await payee1.getAddress()), - registry, - 'UpkeepNotCanceled', - ) - }) - - it('reverts if called with the 0 address', async () => { - await evmRevertCustomError( - registry.connect(admin).withdrawFunds(upkeepId, zeroAddress), - registry, - 'InvalidRecipient', - ) - }) - - describe('after the registration is paused, then cancelled', () => { - it('allows the admin to withdraw', async () => { - const balance = await registry.getBalance(upkeepId) - const payee = await payee1.getAddress() - await registry.connect(admin).pauseUpkeep(upkeepId) - await registry.connect(owner).cancelUpkeep(upkeepId) - await expect(() => - registry.connect(admin).withdrawFunds(upkeepId, payee), - ).to.changeTokenBalance(linkToken, payee1, balance) - }) - }) - - describe('after the registration is cancelled', () => { - beforeEach(async () => { - await registry.connect(owner).cancelUpkeep(upkeepId) - await registry.connect(owner).cancelUpkeep(upkeepId2) - }) - - it('can be called successively on two upkeeps', async () => { - await registry - .connect(admin) - .withdrawFunds(upkeepId, await payee1.getAddress()) - await registry - .connect(admin) - .withdrawFunds(upkeepId2, await payee1.getAddress()) - }) - - it('moves the funds out and updates the balance and emits an event', async () => { - const payee1Before = await linkToken.balanceOf( - await payee1.getAddress(), - ) - const registryBefore = await linkToken.balanceOf(registry.address) - - let registration = await registry.getUpkeep(upkeepId) - const previousBalance = registration.balance - - const tx = await registry - .connect(admin) - .withdrawFunds(upkeepId, await payee1.getAddress()) - await expect(tx) - .to.emit(registry, 'FundsWithdrawn') - .withArgs(upkeepId, previousBalance, await payee1.getAddress()) - - const payee1After = await linkToken.balanceOf(await payee1.getAddress()) - const registryAfter = await linkToken.balanceOf(registry.address) - - assert.isTrue(payee1Before.add(previousBalance).eq(payee1After)) - assert.isTrue(registryBefore.sub(previousBalance).eq(registryAfter)) - - registration = await registry.getUpkeep(upkeepId) - assert.equal(0, registration.balance.toNumber()) - }) - }) - }) - - describe('#simulatePerformUpkeep', () => { - it('reverts if called by non zero address', async () => { - await evmRevertCustomError( - registry - .connect(await owner.getAddress()) - .callStatic.simulatePerformUpkeep(upkeepId, '0x'), - registry, - 'OnlySimulatedBackend', - ) - }) - - it('reverts when registry is paused', async () => { - await registry.connect(owner).pause() - await evmRevertCustomError( - registry - .connect(zeroAddress) - .callStatic.simulatePerformUpkeep(upkeepId, '0x'), - registry, - 'RegistryPaused', - ) - }) - - it('returns false and gasUsed when perform fails', async () => { - await mock.setCanPerform(false) - - const simulatePerformResult = await registry - .connect(zeroAddress) - .callStatic.simulatePerformUpkeep(upkeepId, '0x') - - assert.equal(simulatePerformResult.success, false) - assert.isTrue(simulatePerformResult.gasUsed.gt(BigNumber.from('0'))) // Some gas should be used - }) - - it('returns true, gasUsed, and performGas when perform succeeds', async () => { - await 
mock.setCanPerform(true) - - const simulatePerformResult = await registry - .connect(zeroAddress) - .callStatic.simulatePerformUpkeep(upkeepId, '0x') - - assert.equal(simulatePerformResult.success, true) - assert.isTrue(simulatePerformResult.gasUsed.gt(BigNumber.from('0'))) // Some gas should be used - }) - - it('returns correct amount of gasUsed when perform succeeds', async () => { - await mock.setCanPerform(true) - await mock.setPerformGasToBurn(performGas) - - const simulatePerformResult = await registry - .connect(zeroAddress) - .callStatic.simulatePerformUpkeep(upkeepId, '0x') - - assert.equal(simulatePerformResult.success, true) - // Full execute gas should be used, with some performGasBuffer(1000) - assert.isTrue( - simulatePerformResult.gasUsed.gt( - performGas.sub(BigNumber.from('1000')), - ), - ) - }) - }) - - describe('#checkUpkeep', () => { - it('reverts if called by non zero address', async () => { - await evmRevertCustomError( - registry - .connect(await owner.getAddress()) - .callStatic['checkUpkeep(uint256)'](upkeepId), - registry, - 'OnlySimulatedBackend', - ) - }) - - it('returns false and error code if the upkeep is cancelled by admin', async () => { - await registry.connect(admin).cancelUpkeep(upkeepId) - - const checkUpkeepResult = await registry - .connect(zeroAddress) - .callStatic['checkUpkeep(uint256)'](upkeepId) - - assert.equal(checkUpkeepResult.upkeepNeeded, false) - assert.equal(checkUpkeepResult.performData, '0x') - assert.equal( - checkUpkeepResult.upkeepFailureReason, - UpkeepFailureReason.UPKEEP_CANCELLED, - ) - expect(checkUpkeepResult.gasUsed).to.equal(0) - expect(checkUpkeepResult.gasLimit).to.equal(performGas) - }) - - it('returns false and error code if the upkeep is cancelled by owner', async () => { - await registry.connect(owner).cancelUpkeep(upkeepId) - - const checkUpkeepResult = await registry - .connect(zeroAddress) - .callStatic['checkUpkeep(uint256)'](upkeepId) - - assert.equal(checkUpkeepResult.upkeepNeeded, false) - assert.equal(checkUpkeepResult.performData, '0x') - assert.equal( - checkUpkeepResult.upkeepFailureReason, - UpkeepFailureReason.UPKEEP_CANCELLED, - ) - expect(checkUpkeepResult.gasUsed).to.equal(0) - expect(checkUpkeepResult.gasLimit).to.equal(performGas) - }) - - it('returns false and error code if the registry is paused', async () => { - await registry.connect(owner).pause() - - const checkUpkeepResult = await registry - .connect(zeroAddress) - .callStatic['checkUpkeep(uint256)'](upkeepId) - - assert.equal(checkUpkeepResult.upkeepNeeded, false) - assert.equal(checkUpkeepResult.performData, '0x') - assert.equal( - checkUpkeepResult.upkeepFailureReason, - UpkeepFailureReason.REGISTRY_PAUSED, - ) - expect(checkUpkeepResult.gasUsed).to.equal(0) - expect(checkUpkeepResult.gasLimit).to.equal(performGas) - }) - - it('returns false and error code if the upkeep is paused', async () => { - await registry.connect(admin).pauseUpkeep(upkeepId) - - const checkUpkeepResult = await registry - .connect(zeroAddress) - .callStatic['checkUpkeep(uint256)'](upkeepId) - - assert.equal(checkUpkeepResult.upkeepNeeded, false) - assert.equal(checkUpkeepResult.performData, '0x') - assert.equal( - checkUpkeepResult.upkeepFailureReason, - UpkeepFailureReason.UPKEEP_PAUSED, - ) - expect(checkUpkeepResult.gasUsed).to.equal(0) - expect(checkUpkeepResult.gasLimit).to.equal(performGas) - }) - - it('returns false and error code if user is out of funds', async () => { - const checkUpkeepResult = await registry - .connect(zeroAddress) - 
.callStatic['checkUpkeep(uint256)'](upkeepId) - - assert.equal(checkUpkeepResult.upkeepNeeded, false) - assert.equal(checkUpkeepResult.performData, '0x') - assert.equal( - checkUpkeepResult.upkeepFailureReason, - UpkeepFailureReason.INSUFFICIENT_BALANCE, - ) - expect(checkUpkeepResult.gasUsed).to.equal(0) - expect(checkUpkeepResult.gasLimit).to.equal(performGas) - }) - - context('when the registration is funded', () => { - beforeEach(async () => { - await linkToken.connect(admin).approve(registry.address, toWei('200')) - await registry.connect(admin).addFunds(upkeepId, toWei('100')) - await registry.connect(admin).addFunds(logUpkeepId, toWei('100')) - }) - - it('returns false, error code, and revert data if the target check reverts', async () => { - await mock.setShouldRevertCheck(true) - await mock.setCheckRevertReason( - 'custom revert error, clever way to insert offchain data', - ) - const checkUpkeepResult = await registry - .connect(zeroAddress) - .callStatic['checkUpkeep(uint256)'](upkeepId) - assert.equal(checkUpkeepResult.upkeepNeeded, false) - - const revertReasonBytes = `0x${checkUpkeepResult.performData.slice(10)}` // remove sighash - assert.equal( - ethers.utils.defaultAbiCoder.decode(['string'], revertReasonBytes)[0], - 'custom revert error, clever way to insert offchain data', - ) - assert.equal( - checkUpkeepResult.upkeepFailureReason, - UpkeepFailureReason.TARGET_CHECK_REVERTED, - ) - assert.isTrue(checkUpkeepResult.gasUsed.gt(BigNumber.from('0'))) // Some gas should be used - expect(checkUpkeepResult.gasLimit).to.equal(performGas) - // Feed data should be returned here - assert.isTrue(checkUpkeepResult.fastGasWei.gt(BigNumber.from('0'))) - assert.isTrue(checkUpkeepResult.linkNative.gt(BigNumber.from('0'))) - }) - - it('returns false, error code, and no revert data if the target check revert data exceeds maxRevertDataSize', async () => { - await mock.setShouldRevertCheck(true) - let longRevertReason = '' - for (let i = 0; i <= maxRevertDataSize.toNumber(); i++) { - longRevertReason += 'x' - } - await mock.setCheckRevertReason(longRevertReason) - const checkUpkeepResult = await registry - .connect(zeroAddress) - .callStatic['checkUpkeep(uint256)'](upkeepId) - assert.equal(checkUpkeepResult.upkeepNeeded, false) - - assert.equal(checkUpkeepResult.performData, '0x') - assert.equal( - checkUpkeepResult.upkeepFailureReason, - UpkeepFailureReason.REVERT_DATA_EXCEEDS_LIMIT, - ) - assert.isTrue(checkUpkeepResult.gasUsed.gt(BigNumber.from('0'))) // Some gas should be used - expect(checkUpkeepResult.gasLimit).to.equal(performGas) - }) - - it('returns false and error code if the upkeep is not needed', async () => { - await mock.setCanCheck(false) - const checkUpkeepResult = await registry - .connect(zeroAddress) - .callStatic['checkUpkeep(uint256)'](upkeepId) - - assert.equal(checkUpkeepResult.upkeepNeeded, false) - assert.equal(checkUpkeepResult.performData, '0x') - assert.equal( - checkUpkeepResult.upkeepFailureReason, - UpkeepFailureReason.UPKEEP_NOT_NEEDED, - ) - assert.isTrue(checkUpkeepResult.gasUsed.gt(BigNumber.from('0'))) // Some gas should be used - expect(checkUpkeepResult.gasLimit).to.equal(performGas) - }) - - it('returns false and error code if the performData exceeds limit', async () => { - let longBytes = '0x' - for (let i = 0; i < 5000; i++) { - longBytes += '1' - } - await mock.setCanCheck(true) - await mock.setPerformData(longBytes) - - const checkUpkeepResult = await registry - .connect(zeroAddress) - .callStatic['checkUpkeep(uint256)'](upkeepId) - - 
assert.equal(checkUpkeepResult.upkeepNeeded, false) - assert.equal(checkUpkeepResult.performData, '0x') - assert.equal( - checkUpkeepResult.upkeepFailureReason, - UpkeepFailureReason.PERFORM_DATA_EXCEEDS_LIMIT, - ) - assert.isTrue(checkUpkeepResult.gasUsed.gt(BigNumber.from('0'))) // Some gas should be used - expect(checkUpkeepResult.gasLimit).to.equal(performGas) - }) - - it('returns true with gas used if the target can execute', async () => { - await mock.setCanCheck(true) - await mock.setPerformData(randomBytes) - - const latestBlock = await ethers.provider.getBlock('latest') - - const checkUpkeepResult = await registry - .connect(zeroAddress) - .callStatic['checkUpkeep(uint256)'](upkeepId, { - blockTag: latestBlock.number, - }) - - assert.equal(checkUpkeepResult.upkeepNeeded, true) - assert.equal(checkUpkeepResult.performData, randomBytes) - assert.equal( - checkUpkeepResult.upkeepFailureReason, - UpkeepFailureReason.NONE, - ) - assert.isTrue(checkUpkeepResult.gasUsed.gt(BigNumber.from('0'))) // Some gas should be used - expect(checkUpkeepResult.gasLimit).to.equal(performGas) - assert.isTrue(checkUpkeepResult.fastGasWei.eq(gasWei)) - assert.isTrue(checkUpkeepResult.linkNative.eq(linkEth)) - }) - - it('calls checkLog for log-trigger upkeeps', async () => { - const log: Log = { - index: 0, - timestamp: 0, - txHash: ethers.utils.randomBytes(32), - blockNumber: 100, - blockHash: ethers.utils.randomBytes(32), - source: randomAddress(), - topics: [ethers.utils.randomBytes(32), ethers.utils.randomBytes(32)], - data: ethers.utils.randomBytes(1000), - } - - await ltUpkeep.mock.checkLog.withArgs(log, '0x').returns(true, '0x1234') - - const checkData = encodeLog(log) - - const checkUpkeepResult = await registry - .connect(zeroAddress) - .callStatic['checkUpkeep(uint256,bytes)'](logUpkeepId, checkData) - - expect(checkUpkeepResult.upkeepNeeded).to.be.true - expect(checkUpkeepResult.performData).to.equal('0x1234') - }) - - itMaybe( - 'has a large enough gas overhead to cover upkeeps that use all their gas [ @skip-coverage ]', - async () => { - await mock.setCanCheck(true) - await mock.setCheckGasToBurn(checkGasLimit) - const gas = checkGasLimit.add(checkGasOverhead) - const checkUpkeepResult = await registry - .connect(zeroAddress) - .callStatic['checkUpkeep(uint256)'](upkeepId, { - gasLimit: gas, - }) - - assert.equal(checkUpkeepResult.upkeepNeeded, true) - }, - ) - }) - }) - - describe('#addFunds', () => { - const amount = toWei('1') - - it('reverts if the registration does not exist', async () => { - await evmRevertCustomError( - registry.connect(keeper1).addFunds(upkeepId.add(1), amount), - registry, - 'UpkeepCancelled', - ) - }) - - it('adds to the balance of the registration', async () => { - await registry.connect(admin).addFunds(upkeepId, amount) - const registration = await registry.getUpkeep(upkeepId) - assert.isTrue(amount.eq(registration.balance)) - }) - - it('lets anyone add funds to an upkeep not just admin', async () => { - await linkToken.connect(owner).transfer(await payee1.getAddress(), amount) - await linkToken.connect(payee1).approve(registry.address, amount) - - await registry.connect(payee1).addFunds(upkeepId, amount) - const registration = await registry.getUpkeep(upkeepId) - assert.isTrue(amount.eq(registration.balance)) - }) - - it('emits a log', async () => { - const tx = await registry.connect(admin).addFunds(upkeepId, amount) - await expect(tx) - .to.emit(registry, 'FundsAdded') - .withArgs(upkeepId, await admin.getAddress(), amount) - }) - - it('reverts if the upkeep is 
canceled', async () => { - await registry.connect(admin).cancelUpkeep(upkeepId) - await evmRevertCustomError( - registry.connect(keeper1).addFunds(upkeepId, amount), - registry, - 'UpkeepCancelled', - ) - }) - }) - - describe('#getActiveUpkeepIDs', () => { - it('reverts if startIndex is out of bounds ', async () => { - await evmRevertCustomError( - registry.getActiveUpkeepIDs(numUpkeeps, 0), - registry, - 'IndexOutOfRange', - ) - await evmRevertCustomError( - registry.getActiveUpkeepIDs(numUpkeeps + 1, 0), - registry, - 'IndexOutOfRange', - ) - }) - - it('returns upkeep IDs bounded by maxCount', async () => { - let upkeepIds = await registry.getActiveUpkeepIDs(0, 1) - assert(upkeepIds.length == 1) - assert(upkeepIds[0].eq(upkeepId)) - upkeepIds = await registry.getActiveUpkeepIDs(1, 3) - assert(upkeepIds.length == 3) - expect(upkeepIds).to.deep.equal([ - afUpkeepId, - logUpkeepId, - streamsLookupUpkeepId, - ]) - }) - - it('returns as many ids as possible if maxCount > num available', async () => { - const upkeepIds = await registry.getActiveUpkeepIDs(1, numUpkeeps + 100) - assert(upkeepIds.length == numUpkeeps - 1) - }) - - it('returns all upkeep IDs if maxCount is 0', async () => { - let upkeepIds = await registry.getActiveUpkeepIDs(0, 0) - assert(upkeepIds.length == numUpkeeps) - upkeepIds = await registry.getActiveUpkeepIDs(2, 0) - assert(upkeepIds.length == numUpkeeps - 2) - }) - }) - - describe('#getMaxPaymentForGas', () => { - let maxl1CostWeiArbWithoutMultiplier: BigNumber - let maxl1CostWeiOptWithoutMultiplier: BigNumber - - beforeEach(async () => { - const arbL1PriceinWei = BigNumber.from(1000) // Same as MockArbGasInfo.sol - maxl1CostWeiArbWithoutMultiplier = arbL1PriceinWei.mul( - maxPerformDataSize - .add(registryTransmitCalldataFixedBytesOverhead) - .add( - registryTransmitCalldataPerSignerBytesOverhead.mul( - BigNumber.from(f + 1), - ), - ), - ) - maxl1CostWeiOptWithoutMultiplier = BigNumber.from(2000000) // Same as MockOVMGasPriceOracle.sol - }) - - itMaybe('calculates the max fee appropriately', async () => { - await verifyMaxPayment(registry, chainModuleBase) - }) - - itMaybe('calculates the max fee appropriately for Arbitrum', async () => { - await verifyMaxPayment( - arbRegistry, - arbitrumModule, - maxl1CostWeiArbWithoutMultiplier, - ) - }) - - itMaybe('calculates the max fee appropriately for Optimism', async () => { - await verifyMaxPayment( - opRegistry, - optimismModule, - maxl1CostWeiOptWithoutMultiplier, - ) - }) - - it('uses the fallback gas price if the feed has issues', async () => { - const chainModuleOverheads = await chainModuleBase.getGasOverhead() - const expectedFallbackMaxPayment = linkForGas( - performGas, - registryConditionalOverhead - .add(registryPerSignerGasOverhead.mul(f + 1)) - .add( - maxPerformDataSize - .add(registryTransmitCalldataFixedBytesOverhead) - .add( - registryTransmitCalldataPerSignerBytesOverhead.mul( - BigNumber.from(f + 1), - ), - ) - .mul( - registryPerPerformByteGasOverhead.add( - chainModuleOverheads.chainModulePerByteOverhead, - ), - ), - ) - .add(chainModuleOverheads.chainModuleFixedOverhead), - gasCeilingMultiplier.mul('2'), // fallbackGasPrice is 2x gas price - paymentPremiumPPB, - flatFeeMicroLink, - ).total - - // Stale feed - let roundId = 99 - const answer = 100 - let updatedAt = 946684800 // New Years 2000 🥳 - let startedAt = 946684799 - await gasPriceFeed - .connect(owner) - .updateRoundData(roundId, answer, updatedAt, startedAt) - - assert.equal( - expectedFallbackMaxPayment.toString(), - ( - await 
registry.getMaxPaymentForGas(Trigger.CONDITION, performGas) - ).toString(), - ) - - // Negative feed price - roundId = 100 - updatedAt = now() - startedAt = 946684799 - await gasPriceFeed - .connect(owner) - .updateRoundData(roundId, -100, updatedAt, startedAt) - - assert.equal( - expectedFallbackMaxPayment.toString(), - ( - await registry.getMaxPaymentForGas(Trigger.CONDITION, performGas) - ).toString(), - ) - - // Zero feed price - roundId = 101 - updatedAt = now() - startedAt = 946684799 - await gasPriceFeed - .connect(owner) - .updateRoundData(roundId, 0, updatedAt, startedAt) - - assert.equal( - expectedFallbackMaxPayment.toString(), - ( - await registry.getMaxPaymentForGas(Trigger.CONDITION, performGas) - ).toString(), - ) - }) - - it('uses the fallback link price if the feed has issues', async () => { - const chainModuleOverheads = await chainModuleBase.getGasOverhead() - const expectedFallbackMaxPayment = linkForGas( - performGas, - registryConditionalOverhead - .add(registryPerSignerGasOverhead.mul(f + 1)) - .add( - maxPerformDataSize - .add(registryTransmitCalldataFixedBytesOverhead) - .add( - registryTransmitCalldataPerSignerBytesOverhead.mul( - BigNumber.from(f + 1), - ), - ) - .mul( - registryPerPerformByteGasOverhead.add( - chainModuleOverheads.chainModulePerByteOverhead, - ), - ), - ) - .add(chainModuleOverheads.chainModuleFixedOverhead), - gasCeilingMultiplier.mul('2'), // fallbackLinkPrice is 1/2 link price, so multiply by 2 - paymentPremiumPPB, - flatFeeMicroLink, - ).total - - // Stale feed - let roundId = 99 - const answer = 100 - let updatedAt = 946684800 // New Years 2000 🥳 - let startedAt = 946684799 - await linkEthFeed - .connect(owner) - .updateRoundData(roundId, answer, updatedAt, startedAt) - - assert.equal( - expectedFallbackMaxPayment.toString(), - ( - await registry.getMaxPaymentForGas(Trigger.CONDITION, performGas) - ).toString(), - ) - - // Negative feed price - roundId = 100 - updatedAt = now() - startedAt = 946684799 - await linkEthFeed - .connect(owner) - .updateRoundData(roundId, -100, updatedAt, startedAt) - - assert.equal( - expectedFallbackMaxPayment.toString(), - ( - await registry.getMaxPaymentForGas(Trigger.CONDITION, performGas) - ).toString(), - ) - - // Zero feed price - roundId = 101 - updatedAt = now() - startedAt = 946684799 - await linkEthFeed - .connect(owner) - .updateRoundData(roundId, 0, updatedAt, startedAt) - - assert.equal( - expectedFallbackMaxPayment.toString(), - ( - await registry.getMaxPaymentForGas(Trigger.CONDITION, performGas) - ).toString(), - ) - }) - }) - - describe('#typeAndVersion', () => { - it('uses the correct type and version', async () => { - const typeAndVersion = await registry.typeAndVersion() - assert.equal(typeAndVersion, 'AutomationRegistry 2.2.0') - }) - }) - - describe('#onTokenTransfer', () => { - const amount = toWei('1') - - it('reverts if not called by the LINK token', async () => { - const data = ethers.utils.defaultAbiCoder.encode(['uint256'], [upkeepId]) - - await evmRevertCustomError( - registry - .connect(keeper1) - .onTokenTransfer(await keeper1.getAddress(), amount, data), - registry, - 'OnlyCallableByLINKToken', - ) - }) - - it('reverts if not called with more or less than 32 bytes', async () => { - const longData = ethers.utils.defaultAbiCoder.encode( - ['uint256', 'uint256'], - ['33', '34'], - ) - const shortData = '0x12345678' - - await evmRevert( - linkToken - .connect(owner) - .transferAndCall(registry.address, amount, longData), - ) - await evmRevert( - linkToken - .connect(owner) - 
.transferAndCall(registry.address, amount, shortData), - ) - }) - - it('reverts if the upkeep is canceled', async () => { - await registry.connect(admin).cancelUpkeep(upkeepId) - await evmRevertCustomError( - registry.connect(keeper1).addFunds(upkeepId, amount), - registry, - 'UpkeepCancelled', - ) - }) - - it('updates the funds of the job id passed', async () => { - const data = ethers.utils.defaultAbiCoder.encode(['uint256'], [upkeepId]) - - const before = (await registry.getUpkeep(upkeepId)).balance - await linkToken - .connect(owner) - .transferAndCall(registry.address, amount, data) - const after = (await registry.getUpkeep(upkeepId)).balance - - assert.isTrue(before.add(amount).eq(after)) - }) - }) - - describeMaybe('#setConfig - onchain', async () => { - const payment = BigNumber.from(1) - const flatFee = BigNumber.from(2) - const maxGas = BigNumber.from(6) - const staleness = BigNumber.from(4) - const ceiling = BigNumber.from(5) - const newMinUpkeepSpend = BigNumber.from(9) - const newMaxCheckDataSize = BigNumber.from(10000) - const newMaxPerformDataSize = BigNumber.from(10000) - const newMaxRevertDataSize = BigNumber.from(10000) - const newMaxPerformGas = BigNumber.from(10000000) - const fbGasEth = BigNumber.from(7) - const fbLinkEth = BigNumber.from(8) - const newTranscoder = randomAddress() - const newRegistrars = [randomAddress(), randomAddress()] - const upkeepManager = randomAddress() - - const newConfig = { - paymentPremiumPPB: payment, - flatFeeMicroLink: flatFee, - checkGasLimit: maxGas, - stalenessSeconds: staleness, - gasCeilingMultiplier: ceiling, - minUpkeepSpend: newMinUpkeepSpend, - maxCheckDataSize: newMaxCheckDataSize, - maxPerformDataSize: newMaxPerformDataSize, - maxRevertDataSize: newMaxRevertDataSize, - maxPerformGas: newMaxPerformGas, - fallbackGasPrice: fbGasEth, - fallbackLinkPrice: fbLinkEth, - transcoder: newTranscoder, - registrars: newRegistrars, - upkeepPrivilegeManager: upkeepManager, - chainModule: chainModuleBase.address, - reorgProtectionEnabled: true, - } - - it('reverts when called by anyone but the proposed owner', async () => { - await evmRevert( - registry - .connect(payee1) - .setConfigTypeSafe( - signerAddresses, - keeperAddresses, - f, - newConfig, - offchainVersion, - offchainBytes, - ), - 'Only callable by owner', - ) - }) - - it('reverts if signers or transmitters are the zero address', async () => { - await evmRevertCustomError( - registry - .connect(owner) - .setConfigTypeSafe( - [randomAddress(), randomAddress(), randomAddress(), zeroAddress], - [ - randomAddress(), - randomAddress(), - randomAddress(), - randomAddress(), - ], - f, - newConfig, - offchainVersion, - offchainBytes, - ), - registry, - 'InvalidSigner', - ) - - await evmRevertCustomError( - registry - .connect(owner) - .setConfigTypeSafe( - [ - randomAddress(), - randomAddress(), - randomAddress(), - randomAddress(), - ], - [randomAddress(), randomAddress(), randomAddress(), zeroAddress], - f, - newConfig, - offchainVersion, - offchainBytes, - ), - registry, - 'InvalidTransmitter', - ) - }) - - it('updates the onchainConfig and configDigest', async () => { - const old = await registry.getState() - const oldConfig = old.config - const oldState = old.state - assert.isTrue(paymentPremiumPPB.eq(oldConfig.paymentPremiumPPB)) - assert.isTrue(flatFeeMicroLink.eq(oldConfig.flatFeeMicroLink)) - assert.isTrue(stalenessSeconds.eq(oldConfig.stalenessSeconds)) - assert.isTrue(gasCeilingMultiplier.eq(oldConfig.gasCeilingMultiplier)) - - await registry - .connect(owner) - 
.setConfigTypeSafe( - signerAddresses, - keeperAddresses, - f, - newConfig, - offchainVersion, - offchainBytes, - ) - - const updated = await registry.getState() - const updatedConfig = updated.config - const updatedState = updated.state - assert.equal(updatedConfig.paymentPremiumPPB, payment.toNumber()) - assert.equal(updatedConfig.flatFeeMicroLink, flatFee.toNumber()) - assert.equal(updatedConfig.stalenessSeconds, staleness.toNumber()) - assert.equal(updatedConfig.gasCeilingMultiplier, ceiling.toNumber()) - assert.equal( - updatedConfig.minUpkeepSpend.toString(), - newMinUpkeepSpend.toString(), - ) - assert.equal( - updatedConfig.maxCheckDataSize, - newMaxCheckDataSize.toNumber(), - ) - assert.equal( - updatedConfig.maxPerformDataSize, - newMaxPerformDataSize.toNumber(), - ) - assert.equal( - updatedConfig.maxRevertDataSize, - newMaxRevertDataSize.toNumber(), - ) - assert.equal(updatedConfig.maxPerformGas, newMaxPerformGas.toNumber()) - assert.equal(updatedConfig.checkGasLimit, maxGas.toNumber()) - assert.equal( - updatedConfig.fallbackGasPrice.toNumber(), - fbGasEth.toNumber(), - ) - assert.equal( - updatedConfig.fallbackLinkPrice.toNumber(), - fbLinkEth.toNumber(), - ) - assert.equal(updatedState.latestEpoch, 0) - - assert(oldState.configCount + 1 == updatedState.configCount) - assert( - oldState.latestConfigBlockNumber != - updatedState.latestConfigBlockNumber, - ) - assert(oldState.latestConfigDigest != updatedState.latestConfigDigest) - - assert.equal(updatedConfig.transcoder, newTranscoder) - assert.deepEqual(updatedConfig.registrars, newRegistrars) - assert.equal(updatedConfig.upkeepPrivilegeManager, upkeepManager) - }) - - it('maintains paused state when config is changed', async () => { - await registry.pause() - const old = await registry.getState() - assert.isTrue(old.state.paused) - - await registry - .connect(owner) - .setConfigTypeSafe( - signerAddresses, - keeperAddresses, - f, - newConfig, - offchainVersion, - offchainBytes, - ) - - const updated = await registry.getState() - assert.isTrue(updated.state.paused) - }) - - it('emits an event', async () => { - const tx = await registry - .connect(owner) - .setConfigTypeSafe( - signerAddresses, - keeperAddresses, - f, - newConfig, - offchainVersion, - offchainBytes, - ) - await expect(tx).to.emit(registry, 'ConfigSet') - }) - }) - - describe('#setConfig - offchain', () => { - let newKeepers: string[] - - beforeEach(async () => { - newKeepers = [ - await personas.Eddy.getAddress(), - await personas.Nick.getAddress(), - await personas.Neil.getAddress(), - await personas.Carol.getAddress(), - ] - }) - - it('reverts when called by anyone but the owner', async () => { - await evmRevert( - registry - .connect(payee1) - .setConfigTypeSafe( - newKeepers, - newKeepers, - f, - config, - offchainVersion, - offchainBytes, - ), - 'Only callable by owner', - ) - }) - - it('reverts if too many keeperAddresses set', async () => { - for (let i = 0; i < 40; i++) { - newKeepers.push(randomAddress()) - } - await evmRevertCustomError( - registry - .connect(owner) - .setConfigTypeSafe( - newKeepers, - newKeepers, - f, - config, - offchainVersion, - offchainBytes, - ), - registry, - 'TooManyOracles', - ) - }) - - it('reverts if f=0', async () => { - await evmRevertCustomError( - registry - .connect(owner) - .setConfigTypeSafe( - newKeepers, - newKeepers, - 0, - config, - offchainVersion, - offchainBytes, - ), - registry, - 'IncorrectNumberOfFaultyOracles', - ) - }) - - it('reverts if signers != transmitters length', async () => { - const signers = 
[randomAddress()] - await evmRevertCustomError( - registry - .connect(owner) - .setConfigTypeSafe( - signers, - newKeepers, - f, - config, - offchainVersion, - offchainBytes, - ), - registry, - 'IncorrectNumberOfSigners', - ) - }) - - it('reverts if signers <= 3f', async () => { - newKeepers.pop() - await evmRevertCustomError( - registry - .connect(owner) - .setConfigTypeSafe( - newKeepers, - newKeepers, - f, - config, - offchainVersion, - offchainBytes, - ), - registry, - 'IncorrectNumberOfSigners', - ) - }) - - it('reverts on repeated signers', async () => { - const newSigners = [ - await personas.Eddy.getAddress(), - await personas.Eddy.getAddress(), - await personas.Eddy.getAddress(), - await personas.Eddy.getAddress(), - ] - await evmRevertCustomError( - registry - .connect(owner) - .setConfigTypeSafe( - newSigners, - newKeepers, - f, - config, - offchainVersion, - offchainBytes, - ), - registry, - 'RepeatedSigner', - ) - }) - - it('reverts on repeated transmitters', async () => { - const newTransmitters = [ - await personas.Eddy.getAddress(), - await personas.Eddy.getAddress(), - await personas.Eddy.getAddress(), - await personas.Eddy.getAddress(), - ] - await evmRevertCustomError( - registry - .connect(owner) - .setConfigTypeSafe( - newKeepers, - newTransmitters, - f, - config, - offchainVersion, - offchainBytes, - ), - registry, - 'RepeatedTransmitter', - ) - }) - - itMaybe('stores new config and emits event', async () => { - // Perform an upkeep so that totalPremium is updated - await registry.connect(admin).addFunds(upkeepId, toWei('100')) - let tx = await getTransmitTx(registry, keeper1, [upkeepId]) - await tx.wait() - - const newOffChainVersion = BigNumber.from('2') - const newOffChainConfig = '0x1122' - - const old = await registry.getState() - const oldState = old.state - assert(oldState.totalPremium.gt(BigNumber.from('0'))) - - const newSigners = newKeepers - tx = await registry - .connect(owner) - .setConfigTypeSafe( - newSigners, - newKeepers, - f, - config, - newOffChainVersion, - newOffChainConfig, - ) - - const updated = await registry.getState() - const updatedState = updated.state - assert(oldState.totalPremium.eq(updatedState.totalPremium)) - - // Old signer addresses which are not in new signers should be non active - for (let i = 0; i < signerAddresses.length; i++) { - const signer = signerAddresses[i] - if (!newSigners.includes(signer)) { - assert(!(await registry.getSignerInfo(signer)).active) - assert((await registry.getSignerInfo(signer)).index == 0) - } - } - // New signer addresses should be active - for (let i = 0; i < newSigners.length; i++) { - const signer = newSigners[i] - assert((await registry.getSignerInfo(signer)).active) - assert((await registry.getSignerInfo(signer)).index == i) - } - // Old transmitter addresses which are not in new transmitter should be non active, update lastCollected but retain other info - for (let i = 0; i < keeperAddresses.length; i++) { - const transmitter = keeperAddresses[i] - if (!newKeepers.includes(transmitter)) { - assert(!(await registry.getTransmitterInfo(transmitter)).active) - assert((await registry.getTransmitterInfo(transmitter)).index == i) - assert( - (await registry.getTransmitterInfo(transmitter)).lastCollected.eq( - oldState.totalPremium.sub( - oldState.totalPremium.mod(keeperAddresses.length), - ), - ), - ) - } - } - // New transmitter addresses should be active - for (let i = 0; i < newKeepers.length; i++) { - const transmitter = newKeepers[i] - assert((await 
registry.getTransmitterInfo(transmitter)).active) - assert((await registry.getTransmitterInfo(transmitter)).index == i) - assert( - (await registry.getTransmitterInfo(transmitter)).lastCollected.eq( - oldState.totalPremium, - ), - ) - } - - // config digest should be updated - assert(oldState.configCount + 1 == updatedState.configCount) - assert( - oldState.latestConfigBlockNumber != - updatedState.latestConfigBlockNumber, - ) - assert(oldState.latestConfigDigest != updatedState.latestConfigDigest) - - //New config should be updated - assert.deepEqual(updated.signers, newKeepers) - assert.deepEqual(updated.transmitters, newKeepers) - - // Event should have been emitted - await expect(tx).to.emit(registry, 'ConfigSet') - }) - }) - - describe('#setPeerRegistryMigrationPermission() / #getPeerRegistryMigrationPermission()', () => { - const peer = randomAddress() - it('allows the owner to set the peer registries', async () => { - let permission = await registry.getPeerRegistryMigrationPermission(peer) - expect(permission).to.equal(0) - await registry.setPeerRegistryMigrationPermission(peer, 1) - permission = await registry.getPeerRegistryMigrationPermission(peer) - expect(permission).to.equal(1) - await registry.setPeerRegistryMigrationPermission(peer, 2) - permission = await registry.getPeerRegistryMigrationPermission(peer) - expect(permission).to.equal(2) - await registry.setPeerRegistryMigrationPermission(peer, 0) - permission = await registry.getPeerRegistryMigrationPermission(peer) - expect(permission).to.equal(0) - }) - it('reverts if passed an unsupported permission', async () => { - await expect( - registry.connect(admin).setPeerRegistryMigrationPermission(peer, 10), - ).to.be.reverted - }) - it('reverts if not called by the owner', async () => { - await expect( - registry.connect(admin).setPeerRegistryMigrationPermission(peer, 1), - ).to.be.revertedWith('Only callable by owner') - }) - }) - - describe('#registerUpkeep', () => { - it('reverts when registry is paused', async () => { - await registry.connect(owner).pause() - await evmRevertCustomError( - registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), emptyBytes, '0x'), - registry, - 'RegistryPaused', - ) - }) - - it('reverts if the target is not a contract', async () => { - await evmRevertCustomError( - registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](zeroAddress, performGas, await admin.getAddress(), emptyBytes, '0x'), - registry, - 'NotAContract', - ) - }) - - it('reverts if called by a non-owner', async () => { - await evmRevertCustomError( - registry - .connect(keeper1) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), emptyBytes, '0x'), - registry, - 'OnlyCallableByOwnerOrRegistrar', - ) - }) - - it('reverts if execute gas is too low', async () => { - await evmRevertCustomError( - registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, 2299, await admin.getAddress(), emptyBytes, '0x'), - registry, - 'GasLimitOutsideRange', - ) - }) - - it('reverts if execute gas is too high', async () => { - await evmRevertCustomError( - registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, 5000001, await admin.getAddress(), emptyBytes, '0x'), - registry, - 'GasLimitOutsideRange', - ) - }) - - it('reverts if checkData is too long', async () => { - let 
longBytes = '0x' - for (let i = 0; i < 10000; i++) { - longBytes += '1' - } - await evmRevertCustomError( - registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), longBytes, '0x'), - registry, - 'CheckDataExceedsLimit', - ) - }) - - it('creates a record of the registration', async () => { - const performGases = [100000, 500000] - const checkDatas = [emptyBytes, '0x12'] - - for (let jdx = 0; jdx < performGases.length; jdx++) { - const performGas = performGases[jdx] - for (let kdx = 0; kdx < checkDatas.length; kdx++) { - const checkData = checkDatas[kdx] - const tx = await registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), checkData, '0x') - - //confirm the upkeep details and verify emitted events - const testUpkeepId = await getUpkeepID(tx) - await expect(tx) - .to.emit(registry, 'UpkeepRegistered') - .withArgs(testUpkeepId, performGas, await admin.getAddress()) - - await expect(tx) - .to.emit(registry, 'UpkeepCheckDataSet') - .withArgs(testUpkeepId, checkData) - await expect(tx) - .to.emit(registry, 'UpkeepTriggerConfigSet') - .withArgs(testUpkeepId, '0x') - - const registration = await registry.getUpkeep(testUpkeepId) - - assert.equal(mock.address, registration.target) - assert.notEqual( - ethers.constants.AddressZero, - await registry.getForwarder(testUpkeepId), - ) - assert.equal( - performGas.toString(), - registration.performGas.toString(), - ) - assert.equal(await admin.getAddress(), registration.admin) - assert.equal(0, registration.balance.toNumber()) - assert.equal(0, registration.amountSpent.toNumber()) - assert.equal(0, registration.lastPerformedBlockNumber) - assert.equal(checkData, registration.checkData) - assert.equal(registration.paused, false) - assert.equal(registration.offchainConfig, '0x') - assert(registration.maxValidBlocknumber.eq('0xffffffff')) - } - } - }) - }) - - describe('#pauseUpkeep', () => { - it('reverts if the registration does not exist', async () => { - await evmRevertCustomError( - registry.connect(keeper1).pauseUpkeep(upkeepId.add(1)), - registry, - 'OnlyCallableByAdmin', - ) - }) - - it('reverts if the upkeep is already canceled', async () => { - await registry.connect(admin).cancelUpkeep(upkeepId) - - await evmRevertCustomError( - registry.connect(admin).pauseUpkeep(upkeepId), - registry, - 'UpkeepCancelled', - ) - }) - - it('reverts if the upkeep is already paused', async () => { - await registry.connect(admin).pauseUpkeep(upkeepId) - - await evmRevertCustomError( - registry.connect(admin).pauseUpkeep(upkeepId), - registry, - 'OnlyUnpausedUpkeep', - ) - }) - - it('reverts if the caller is not the upkeep admin', async () => { - await evmRevertCustomError( - registry.connect(keeper1).pauseUpkeep(upkeepId), - registry, - 'OnlyCallableByAdmin', - ) - }) - - it('pauses the upkeep and emits an event', async () => { - const tx = await registry.connect(admin).pauseUpkeep(upkeepId) - await expect(tx).to.emit(registry, 'UpkeepPaused').withArgs(upkeepId) - - const registration = await registry.getUpkeep(upkeepId) - assert.equal(registration.paused, true) - }) - }) - - describe('#unpauseUpkeep', () => { - it('reverts if the registration does not exist', async () => { - await evmRevertCustomError( - registry.connect(keeper1).unpauseUpkeep(upkeepId.add(1)), - registry, - 'OnlyCallableByAdmin', - ) - }) - - it('reverts if the upkeep is already canceled', async () => { - await 
registry.connect(owner).cancelUpkeep(upkeepId) - - await evmRevertCustomError( - registry.connect(admin).unpauseUpkeep(upkeepId), - registry, - 'UpkeepCancelled', - ) - }) - - it('marks the contract as paused', async () => { - assert.isFalse((await registry.getState()).state.paused) - - await registry.connect(owner).pause() - - assert.isTrue((await registry.getState()).state.paused) - }) - - it('reverts if the upkeep is not paused', async () => { - await evmRevertCustomError( - registry.connect(admin).unpauseUpkeep(upkeepId), - registry, - 'OnlyPausedUpkeep', - ) - }) - - it('reverts if the caller is not the upkeep admin', async () => { - await registry.connect(admin).pauseUpkeep(upkeepId) - - const registration = await registry.getUpkeep(upkeepId) - - assert.equal(registration.paused, true) - - await evmRevertCustomError( - registry.connect(keeper1).unpauseUpkeep(upkeepId), - registry, - 'OnlyCallableByAdmin', - ) - }) - - it('unpauses the upkeep and emits an event', async () => { - const originalCount = (await registry.getActiveUpkeepIDs(0, 0)).length - - await registry.connect(admin).pauseUpkeep(upkeepId) - - const tx = await registry.connect(admin).unpauseUpkeep(upkeepId) - - await expect(tx).to.emit(registry, 'UpkeepUnpaused').withArgs(upkeepId) - - const registration = await registry.getUpkeep(upkeepId) - assert.equal(registration.paused, false) - - const upkeepIds = await registry.getActiveUpkeepIDs(0, 0) - assert.equal(upkeepIds.length, originalCount) - }) - }) - - describe('#setUpkeepCheckData', () => { - it('reverts if the registration does not exist', async () => { - await evmRevertCustomError( - registry - .connect(keeper1) - .setUpkeepCheckData(upkeepId.add(1), randomBytes), - registry, - 'OnlyCallableByAdmin', - ) - }) - - it('reverts if the caller is not upkeep admin', async () => { - await evmRevertCustomError( - registry.connect(keeper1).setUpkeepCheckData(upkeepId, randomBytes), - registry, - 'OnlyCallableByAdmin', - ) - }) - - it('reverts if the upkeep is cancelled', async () => { - await registry.connect(admin).cancelUpkeep(upkeepId) - - await evmRevertCustomError( - registry.connect(admin).setUpkeepCheckData(upkeepId, randomBytes), - registry, - 'UpkeepCancelled', - ) - }) - - it('is allowed to update on paused upkeep', async () => { - await registry.connect(admin).pauseUpkeep(upkeepId) - await registry.connect(admin).setUpkeepCheckData(upkeepId, randomBytes) - - const registration = await registry.getUpkeep(upkeepId) - assert.equal(randomBytes, registration.checkData) - }) - - it('reverts if new data exceeds limit', async () => { - let longBytes = '0x' - for (let i = 0; i < 10000; i++) { - longBytes += '1' - } - - await evmRevertCustomError( - registry.connect(admin).setUpkeepCheckData(upkeepId, longBytes), - registry, - 'CheckDataExceedsLimit', - ) - }) - - it('updates the upkeep check data and emits an event', async () => { - const tx = await registry - .connect(admin) - .setUpkeepCheckData(upkeepId, randomBytes) - await expect(tx) - .to.emit(registry, 'UpkeepCheckDataSet') - .withArgs(upkeepId, randomBytes) - - const registration = await registry.getUpkeep(upkeepId) - assert.equal(randomBytes, registration.checkData) - }) - }) - - describe('#setUpkeepGasLimit', () => { - const newGasLimit = BigNumber.from('300000') - - it('reverts if the registration does not exist', async () => { - await evmRevertCustomError( - registry.connect(admin).setUpkeepGasLimit(upkeepId.add(1), newGasLimit), - registry, - 'OnlyCallableByAdmin', - ) - }) - - it('reverts if the upkeep is 
canceled', async () => { - await registry.connect(admin).cancelUpkeep(upkeepId) - await evmRevertCustomError( - registry.connect(admin).setUpkeepGasLimit(upkeepId, newGasLimit), - registry, - 'UpkeepCancelled', - ) - }) - - it('reverts if called by anyone but the admin', async () => { - await evmRevertCustomError( - registry.connect(owner).setUpkeepGasLimit(upkeepId, newGasLimit), - registry, - 'OnlyCallableByAdmin', - ) - }) - - it('reverts if new gas limit is out of bounds', async () => { - await evmRevertCustomError( - registry - .connect(admin) - .setUpkeepGasLimit(upkeepId, BigNumber.from('100')), - registry, - 'GasLimitOutsideRange', - ) - await evmRevertCustomError( - registry - .connect(admin) - .setUpkeepGasLimit(upkeepId, BigNumber.from('6000000')), - registry, - 'GasLimitOutsideRange', - ) - }) - - it('updates the gas limit successfully', async () => { - const initialGasLimit = (await registry.getUpkeep(upkeepId)).performGas - assert.equal(initialGasLimit, performGas.toNumber()) - await registry.connect(admin).setUpkeepGasLimit(upkeepId, newGasLimit) - const updatedGasLimit = (await registry.getUpkeep(upkeepId)).performGas - assert.equal(updatedGasLimit, newGasLimit.toNumber()) - }) - - it('emits a log', async () => { - const tx = await registry - .connect(admin) - .setUpkeepGasLimit(upkeepId, newGasLimit) - await expect(tx) - .to.emit(registry, 'UpkeepGasLimitSet') - .withArgs(upkeepId, newGasLimit) - }) - }) - - describe('#setUpkeepOffchainConfig', () => { - const newConfig = '0xc0ffeec0ffee' - - it('reverts if the registration does not exist', async () => { - await evmRevertCustomError( - registry - .connect(admin) - .setUpkeepOffchainConfig(upkeepId.add(1), newConfig), - registry, - 'OnlyCallableByAdmin', - ) - }) - - it('reverts if the upkeep is canceled', async () => { - await registry.connect(admin).cancelUpkeep(upkeepId) - await evmRevertCustomError( - registry.connect(admin).setUpkeepOffchainConfig(upkeepId, newConfig), - registry, - 'UpkeepCancelled', - ) - }) - - it('reverts if called by anyone but the admin', async () => { - await evmRevertCustomError( - registry.connect(owner).setUpkeepOffchainConfig(upkeepId, newConfig), - registry, - 'OnlyCallableByAdmin', - ) - }) - - it('updates the config successfully', async () => { - const initialConfig = (await registry.getUpkeep(upkeepId)).offchainConfig - assert.equal(initialConfig, '0x') - await registry.connect(admin).setUpkeepOffchainConfig(upkeepId, newConfig) - const updatedConfig = (await registry.getUpkeep(upkeepId)).offchainConfig - assert.equal(newConfig, updatedConfig) - }) - - it('emits a log', async () => { - const tx = await registry - .connect(admin) - .setUpkeepOffchainConfig(upkeepId, newConfig) - await expect(tx) - .to.emit(registry, 'UpkeepOffchainConfigSet') - .withArgs(upkeepId, newConfig) - }) - }) - - describe('#setUpkeepTriggerConfig', () => { - const newConfig = '0xdeadbeef' - - it('reverts if the registration does not exist', async () => { - await evmRevertCustomError( - registry - .connect(admin) - .setUpkeepTriggerConfig(upkeepId.add(1), newConfig), - registry, - 'OnlyCallableByAdmin', - ) - }) - - it('reverts if the upkeep is canceled', async () => { - await registry.connect(admin).cancelUpkeep(upkeepId) - await evmRevertCustomError( - registry.connect(admin).setUpkeepTriggerConfig(upkeepId, newConfig), - registry, - 'UpkeepCancelled', - ) - }) - - it('reverts if called by anyone but the admin', async () => { - await evmRevertCustomError( - 
registry.connect(owner).setUpkeepTriggerConfig(upkeepId, newConfig), - registry, - 'OnlyCallableByAdmin', - ) - }) - - it('emits a log', async () => { - const tx = await registry - .connect(admin) - .setUpkeepTriggerConfig(upkeepId, newConfig) - await expect(tx) - .to.emit(registry, 'UpkeepTriggerConfigSet') - .withArgs(upkeepId, newConfig) - }) - }) - - describe('#transferUpkeepAdmin', () => { - it('reverts when called by anyone but the current upkeep admin', async () => { - await evmRevertCustomError( - registry - .connect(payee1) - .transferUpkeepAdmin(upkeepId, await payee2.getAddress()), - registry, - 'OnlyCallableByAdmin', - ) - }) - - it('reverts when transferring to self', async () => { - await evmRevertCustomError( - registry - .connect(admin) - .transferUpkeepAdmin(upkeepId, await admin.getAddress()), - registry, - 'ValueNotChanged', - ) - }) - - it('reverts when the upkeep is cancelled', async () => { - await registry.connect(admin).cancelUpkeep(upkeepId) - - await evmRevertCustomError( - registry - .connect(admin) - .transferUpkeepAdmin(upkeepId, await keeper1.getAddress()), - registry, - 'UpkeepCancelled', - ) - }) - - it('allows cancelling transfer by reverting to zero address', async () => { - await registry - .connect(admin) - .transferUpkeepAdmin(upkeepId, await payee1.getAddress()) - const tx = await registry - .connect(admin) - .transferUpkeepAdmin(upkeepId, ethers.constants.AddressZero) - - await expect(tx) - .to.emit(registry, 'UpkeepAdminTransferRequested') - .withArgs( - upkeepId, - await admin.getAddress(), - ethers.constants.AddressZero, - ) - }) - - it('does not change the upkeep admin', async () => { - await registry - .connect(admin) - .transferUpkeepAdmin(upkeepId, await payee1.getAddress()) - - const upkeep = await registry.getUpkeep(upkeepId) - assert.equal(await admin.getAddress(), upkeep.admin) - }) - - it('emits an event announcing the new upkeep admin', async () => { - const tx = await registry - .connect(admin) - .transferUpkeepAdmin(upkeepId, await payee1.getAddress()) - - await expect(tx) - .to.emit(registry, 'UpkeepAdminTransferRequested') - .withArgs(upkeepId, await admin.getAddress(), await payee1.getAddress()) - }) - - it('does not emit an event when called with the same proposed upkeep admin', async () => { - await registry - .connect(admin) - .transferUpkeepAdmin(upkeepId, await payee1.getAddress()) - - const tx = await registry - .connect(admin) - .transferUpkeepAdmin(upkeepId, await payee1.getAddress()) - const receipt = await tx.wait() - assert.equal(0, receipt.logs.length) - }) - }) - - describe('#acceptUpkeepAdmin', () => { - beforeEach(async () => { - // Start admin transfer to payee1 - await registry - .connect(admin) - .transferUpkeepAdmin(upkeepId, await payee1.getAddress()) - }) - - it('reverts when not called by the proposed upkeep admin', async () => { - await evmRevertCustomError( - registry.connect(payee2).acceptUpkeepAdmin(upkeepId), - registry, - 'OnlyCallableByProposedAdmin', - ) - }) - - it('reverts when the upkeep is cancelled', async () => { - await registry.connect(admin).cancelUpkeep(upkeepId) - - await evmRevertCustomError( - registry.connect(payee1).acceptUpkeepAdmin(upkeepId), - registry, - 'UpkeepCancelled', - ) - }) - - it('does change the admin', async () => { - await registry.connect(payee1).acceptUpkeepAdmin(upkeepId) - - const upkeep = await registry.getUpkeep(upkeepId) - assert.equal(await payee1.getAddress(), upkeep.admin) - }) - - it('emits an event announcing the new upkeep admin', async () => { - const tx = 
await registry.connect(payee1).acceptUpkeepAdmin(upkeepId)
- await expect(tx)
- .to.emit(registry, 'UpkeepAdminTransferred')
- .withArgs(upkeepId, await admin.getAddress(), await payee1.getAddress())
- })
- })
-
- describe('#withdrawOwnerFunds', () => {
- it('can only be called by owner', async () => {
- await evmRevert(
- registry.connect(keeper1).withdrawOwnerFunds(),
- 'Only callable by owner',
- )
- })
-
- itMaybe('withdraws the collected fees to owner', async () => {
- await registry.connect(admin).addFunds(upkeepId, toWei('100'))
- // Very high min spend, whole balance as cancellation fees
- const minUpkeepSpend = toWei('1000')
- await registry.connect(owner).setConfigTypeSafe(
- signerAddresses,
- keeperAddresses,
- f,
- {
- paymentPremiumPPB,
- flatFeeMicroLink,
- checkGasLimit,
- stalenessSeconds,
- gasCeilingMultiplier,
- minUpkeepSpend,
- maxCheckDataSize,
- maxPerformDataSize,
- maxRevertDataSize,
- maxPerformGas,
- fallbackGasPrice,
- fallbackLinkPrice,
- transcoder: transcoder.address,
- registrars: [],
- upkeepPrivilegeManager: upkeepManager,
- chainModule: chainModuleBase.address,
- reorgProtectionEnabled: true,
- },
- offchainVersion,
- offchainBytes,
- )
- const upkeepBalance = (await registry.getUpkeep(upkeepId)).balance
- const ownerBefore = await linkToken.balanceOf(await owner.getAddress())
-
- await registry.connect(owner).cancelUpkeep(upkeepId)
-
- // Transferred to owner balance on registry
- let ownerRegistryBalance = (await registry.getState()).state
- .ownerLinkBalance
- assert.isTrue(ownerRegistryBalance.eq(upkeepBalance))
-
- // Now withdraw
- await registry.connect(owner).withdrawOwnerFunds()
-
- ownerRegistryBalance = (await registry.getState()).state.ownerLinkBalance
- const ownerAfter = await linkToken.balanceOf(await owner.getAddress())
-
- // Owner registry balance should be changed to 0
- assert.isTrue(ownerRegistryBalance.eq(BigNumber.from('0')))
-
- // Owner should be credited with the balance
- assert.isTrue(ownerBefore.add(upkeepBalance).eq(ownerAfter))
- })
- })
-
- describe('#transferPayeeship', () => {
- it('reverts when called by anyone but the current payee', async () => {
- await evmRevertCustomError(
- registry
- .connect(payee2)
- .transferPayeeship(
- await keeper1.getAddress(),
- await payee2.getAddress(),
- ),
- registry,
- 'OnlyCallableByPayee',
- )
- })
-
- it('reverts when transferring to self', async () => {
- await evmRevertCustomError(
- registry
- .connect(payee1)
- .transferPayeeship(
- await keeper1.getAddress(),
- await payee1.getAddress(),
- ),
- registry,
- 'ValueNotChanged',
- )
- })
-
- it('does not change the payee', async () => {
- await registry
- .connect(payee1)
- .transferPayeeship(
- await keeper1.getAddress(),
- await payee2.getAddress(),
- )
-
- const info = await registry.getTransmitterInfo(await keeper1.getAddress())
- assert.equal(await payee1.getAddress(), info.payee)
- })
-
- it('emits an event announcing the new payee', async () => {
- const tx = await registry
- .connect(payee1)
- .transferPayeeship(
- await keeper1.getAddress(),
- await payee2.getAddress(),
- )
- await expect(tx)
- .to.emit(registry, 'PayeeshipTransferRequested')
- .withArgs(
- await keeper1.getAddress(),
- await payee1.getAddress(),
- await payee2.getAddress(),
- )
- })
-
- it('does not emit an event when called with the same proposal', async () => {
- await registry
- .connect(payee1)
- .transferPayeeship(
- await keeper1.getAddress(),
- await payee2.getAddress(),
- )
-
- const tx = await registry
- .connect(payee1)
- .transferPayeeship(
-
await keeper1.getAddress(), - await payee2.getAddress(), - ) - const receipt = await tx.wait() - assert.equal(0, receipt.logs.length) - }) - }) - - describe('#acceptPayeeship', () => { - beforeEach(async () => { - await registry - .connect(payee1) - .transferPayeeship( - await keeper1.getAddress(), - await payee2.getAddress(), - ) - }) - - it('reverts when called by anyone but the proposed payee', async () => { - await evmRevertCustomError( - registry.connect(payee1).acceptPayeeship(await keeper1.getAddress()), - registry, - 'OnlyCallableByProposedPayee', - ) - }) - - it('emits an event announcing the new payee', async () => { - const tx = await registry - .connect(payee2) - .acceptPayeeship(await keeper1.getAddress()) - await expect(tx) - .to.emit(registry, 'PayeeshipTransferred') - .withArgs( - await keeper1.getAddress(), - await payee1.getAddress(), - await payee2.getAddress(), - ) - }) - - it('does change the payee', async () => { - await registry.connect(payee2).acceptPayeeship(await keeper1.getAddress()) - - const info = await registry.getTransmitterInfo(await keeper1.getAddress()) - assert.equal(await payee2.getAddress(), info.payee) - }) - }) - - describe('#pause', () => { - it('reverts if called by a non-owner', async () => { - await evmRevert( - registry.connect(keeper1).pause(), - 'Only callable by owner', - ) - }) - - it('marks the contract as paused', async () => { - assert.isFalse((await registry.getState()).state.paused) - - await registry.connect(owner).pause() - - assert.isTrue((await registry.getState()).state.paused) - }) - - it('Does not allow transmits when paused', async () => { - await registry.connect(owner).pause() - - await evmRevertCustomError( - getTransmitTx(registry, keeper1, [upkeepId]), - registry, - 'RegistryPaused', - ) - }) - - it('Does not allow creation of new upkeeps when paused', async () => { - await registry.connect(owner).pause() - - await evmRevertCustomError( - registry - .connect(owner) - [ - 'registerUpkeep(address,uint32,address,bytes,bytes)' - ](mock.address, performGas, await admin.getAddress(), emptyBytes, '0x'), - registry, - 'RegistryPaused', - ) - }) - }) - - describe('#unpause', () => { - beforeEach(async () => { - await registry.connect(owner).pause() - }) - - it('reverts if called by a non-owner', async () => { - await evmRevert( - registry.connect(keeper1).unpause(), - 'Only callable by owner', - ) - }) - - it('marks the contract as not paused', async () => { - assert.isTrue((await registry.getState()).state.paused) - - await registry.connect(owner).unpause() - - assert.isFalse((await registry.getState()).state.paused) - }) - }) - - describe('#migrateUpkeeps() / #receiveUpkeeps()', async () => { - context('when permissions are set', () => { - beforeEach(async () => { - await linkToken.connect(owner).approve(registry.address, toWei('100')) - await registry.connect(owner).addFunds(upkeepId, toWei('100')) - await registry.setPeerRegistryMigrationPermission(mgRegistry.address, 1) - await mgRegistry.setPeerRegistryMigrationPermission(registry.address, 2) - }) - - it('migrates an upkeep', async () => { - const offchainBytes = '0x987654abcd' - await registry - .connect(admin) - .setUpkeepOffchainConfig(upkeepId, offchainBytes) - const reg1Upkeep = await registry.getUpkeep(upkeepId) - const forwarderAddress = await registry.getForwarder(upkeepId) - expect(reg1Upkeep.balance).to.equal(toWei('100')) - expect(reg1Upkeep.checkData).to.equal(randomBytes) - expect(forwarderAddress).to.not.equal(ethers.constants.AddressZero) - 
expect(reg1Upkeep.offchainConfig).to.equal(offchainBytes) - expect((await registry.getState()).state.numUpkeeps).to.equal( - numUpkeeps, - ) - const forwarder = IAutomationForwarderFactory.connect( - forwarderAddress, - owner, - ) - expect(await forwarder.getRegistry()).to.equal(registry.address) - // Set an upkeep admin transfer in progress too - await registry - .connect(admin) - .transferUpkeepAdmin(upkeepId, await payee1.getAddress()) - - // migrate - await registry - .connect(admin) - .migrateUpkeeps([upkeepId], mgRegistry.address) - expect((await registry.getState()).state.numUpkeeps).to.equal( - numUpkeeps - 1, - ) - expect((await mgRegistry.getState()).state.numUpkeeps).to.equal(1) - expect((await registry.getUpkeep(upkeepId)).balance).to.equal(0) - expect((await registry.getUpkeep(upkeepId)).checkData).to.equal('0x') - expect((await mgRegistry.getUpkeep(upkeepId)).balance).to.equal( - toWei('100'), - ) - expect( - (await mgRegistry.getState()).state.expectedLinkBalance, - ).to.equal(toWei('100')) - expect((await mgRegistry.getUpkeep(upkeepId)).checkData).to.equal( - randomBytes, - ) - expect((await mgRegistry.getUpkeep(upkeepId)).offchainConfig).to.equal( - offchainBytes, - ) - expect(await mgRegistry.getForwarder(upkeepId)).to.equal( - forwarderAddress, - ) - // test that registry is updated on forwarder - expect(await forwarder.getRegistry()).to.equal(mgRegistry.address) - // migration will delete the upkeep and nullify admin transfer - await expect( - registry.connect(payee1).acceptUpkeepAdmin(upkeepId), - ).to.be.revertedWithCustomError(registry, 'UpkeepCancelled') - await expect( - mgRegistry.connect(payee1).acceptUpkeepAdmin(upkeepId), - ).to.be.revertedWithCustomError( - mgRegistry, - 'OnlyCallableByProposedAdmin', - ) - }) - - it('migrates a paused upkeep', async () => { - expect((await registry.getUpkeep(upkeepId)).balance).to.equal( - toWei('100'), - ) - expect((await registry.getUpkeep(upkeepId)).checkData).to.equal( - randomBytes, - ) - expect((await registry.getState()).state.numUpkeeps).to.equal( - numUpkeeps, - ) - await registry.connect(admin).pauseUpkeep(upkeepId) - // verify the upkeep is paused - expect((await registry.getUpkeep(upkeepId)).paused).to.equal(true) - // migrate - await registry - .connect(admin) - .migrateUpkeeps([upkeepId], mgRegistry.address) - expect((await registry.getState()).state.numUpkeeps).to.equal( - numUpkeeps - 1, - ) - expect((await mgRegistry.getState()).state.numUpkeeps).to.equal(1) - expect((await registry.getUpkeep(upkeepId)).balance).to.equal(0) - expect((await mgRegistry.getUpkeep(upkeepId)).balance).to.equal( - toWei('100'), - ) - expect((await registry.getUpkeep(upkeepId)).checkData).to.equal('0x') - expect((await mgRegistry.getUpkeep(upkeepId)).checkData).to.equal( - randomBytes, - ) - expect( - (await mgRegistry.getState()).state.expectedLinkBalance, - ).to.equal(toWei('100')) - // verify the upkeep is still paused after migration - expect((await mgRegistry.getUpkeep(upkeepId)).paused).to.equal(true) - }) - - it('emits an event on both contracts', async () => { - expect((await registry.getUpkeep(upkeepId)).balance).to.equal( - toWei('100'), - ) - expect((await registry.getUpkeep(upkeepId)).checkData).to.equal( - randomBytes, - ) - expect((await registry.getState()).state.numUpkeeps).to.equal( - numUpkeeps, - ) - const tx = registry - .connect(admin) - .migrateUpkeeps([upkeepId], mgRegistry.address) - await expect(tx) - .to.emit(registry, 'UpkeepMigrated') - .withArgs(upkeepId, toWei('100'), mgRegistry.address) - await 
expect(tx)
- .to.emit(mgRegistry, 'UpkeepReceived')
- .withArgs(upkeepId, toWei('100'), registry.address)
- })
-
- it('is only migratable by the admin', async () => {
- await expect(
- registry
- .connect(owner)
- .migrateUpkeeps([upkeepId], mgRegistry.address),
- ).to.be.revertedWithCustomError(registry, 'OnlyCallableByAdmin')
- await registry
- .connect(admin)
- .migrateUpkeeps([upkeepId], mgRegistry.address)
- })
- })
-
- context('when permissions are not set', () => {
- it('reverts', async () => {
- // no permissions
- await registry.setPeerRegistryMigrationPermission(mgRegistry.address, 0)
- await mgRegistry.setPeerRegistryMigrationPermission(registry.address, 0)
- await expect(registry.migrateUpkeeps([upkeepId], mgRegistry.address)).to
- .be.reverted
- // only outgoing permissions
- await registry.setPeerRegistryMigrationPermission(mgRegistry.address, 1)
- await mgRegistry.setPeerRegistryMigrationPermission(registry.address, 0)
- await expect(registry.migrateUpkeeps([upkeepId], mgRegistry.address)).to
- .be.reverted
- // only incoming permissions
- await registry.setPeerRegistryMigrationPermission(mgRegistry.address, 0)
- await mgRegistry.setPeerRegistryMigrationPermission(registry.address, 2)
- await expect(registry.migrateUpkeeps([upkeepId], mgRegistry.address)).to
- .be.reverted
- // permissions opposite direction
- await registry.setPeerRegistryMigrationPermission(mgRegistry.address, 2)
- await mgRegistry.setPeerRegistryMigrationPermission(registry.address, 1)
- await expect(registry.migrateUpkeeps([upkeepId], mgRegistry.address)).to
- .be.reverted
- })
- })
- })
-
- describe('#setPayees', () => {
- const IGNORE_ADDRESS = '0xFFfFfFffFFfffFFfFFfFFFFFffFFFffffFfFFFfF'
-
- it('reverts when not called by the owner', async () => {
- await evmRevert(
- registry.connect(keeper1).setPayees(payees),
- 'Only callable by owner',
- )
- })
-
- it('reverts with different numbers of payees than transmitters', async () => {
- await evmRevertCustomError(
- registry.connect(owner).setPayees([...payees, randomAddress()]),
- registry,
- 'ParameterLengthError',
- )
- })
-
- it('reverts if the payee is the zero address', async () => {
- await blankRegistry.connect(owner).setConfigTypeSafe(...baseConfig) // used to test initial config
-
- await evmRevertCustomError(
- blankRegistry // used to test initial config
- .connect(owner)
- .setPayees([ethers.constants.AddressZero, ...payees.slice(1)]),
- registry,
- 'InvalidPayee',
- )
- })
-
- itMaybe(
- 'sets the payees when existing payees are zero address',
- async () => {
- //Initial payees should be zero address
- await blankRegistry.connect(owner).setConfigTypeSafe(...baseConfig) // used to test initial config
-
- for (let i = 0; i < keeperAddresses.length; i++) {
- const payee = (
- await blankRegistry.getTransmitterInfo(keeperAddresses[i])
- ).payee // used to test initial config
- assert.equal(payee, zeroAddress)
- }
-
- await blankRegistry.connect(owner).setPayees(payees) // used to test initial config
-
- for (let i = 0; i < keeperAddresses.length; i++) {
- const payee = (
- await blankRegistry.getTransmitterInfo(keeperAddresses[i])
- ).payee
- assert.equal(payee, payees[i])
- }
- },
- )
-
- it('does not change the payee if IGNORE_ADDRESS is used as payee', async () => {
- const signers = Array.from({ length: 5 }, randomAddress)
- const keepers = Array.from({ length: 5 }, randomAddress)
- const payees = Array.from({ length: 5 }, randomAddress)
- const newTransmitter = randomAddress()
- const newPayee = randomAddress()
- const ignoreAddresses = new Array(payees.length).fill(IGNORE_ADDRESS)
- const newPayees = [...ignoreAddresses, newPayee]
- // arbitrum registry
- // configure registry with 5 keepers // optimism registry
- await blankRegistry // used to test initial configurations
- .connect(owner)
- .setConfigTypeSafe(
- signers,
- keepers,
- f,
- config,
- offchainVersion,
- offchainBytes,
- )
- // arbitrum registry
- // set initial payees // optimism registry
- await blankRegistry.connect(owner).setPayees(payees) // used to test initial configurations
- // arbitrum registry
- // add another keeper // optimism registry
- await blankRegistry // used to test initial configurations
- .connect(owner)
- .setConfigTypeSafe(
- [...signers, randomAddress()],
- [...keepers, newTransmitter],
- f,
- config,
- offchainVersion,
- offchainBytes,
- )
- // arbitrum registry
- // update payee list // optimism registry // arbitrum registry
- await blankRegistry.connect(owner).setPayees(newPayees) // used to test initial configurations // optimism registry
- const ignored = await blankRegistry.getTransmitterInfo(newTransmitter) // used to test initial configurations
- assert.equal(newPayee, ignored.payee)
- assert.equal(true, ignored.active)
- })
-
- it('reverts if payee is non zero and owner tries to change payee', async () => {
- const newPayees = [randomAddress(), ...payees.slice(1)]
-
- await evmRevertCustomError(
- registry.connect(owner).setPayees(newPayees),
- registry,
- 'InvalidPayee',
- )
- })
-
- it('emits events for every payee added and removed', async () => {
- const tx = await registry.connect(owner).setPayees(payees)
- await expect(tx)
- .to.emit(registry, 'PayeesUpdated')
- .withArgs(keeperAddresses, payees)
- })
- })
-
- describe('#cancelUpkeep', () => {
- it('reverts if the ID is not valid', async () => {
- await evmRevertCustomError(
- registry.connect(owner).cancelUpkeep(upkeepId.add(1)),
- registry,
- 'CannotCancel',
- )
- })
-
- it('reverts if called by a non-owner/non-admin', async () => {
- await evmRevertCustomError(
- registry.connect(keeper1).cancelUpkeep(upkeepId),
- registry,
- 'OnlyCallableByOwnerOrAdmin',
- )
- })
-
- describe('when called by the owner', async () => {
- it('sets the registration to invalid immediately', async () => {
- const tx = await registry.connect(owner).cancelUpkeep(upkeepId)
- const receipt = await tx.wait()
- const registration = await registry.getUpkeep(upkeepId)
- assert.equal(
- registration.maxValidBlocknumber.toNumber(),
- receipt.blockNumber,
- )
- })
-
- it('emits an event', async () => {
- const tx = await registry.connect(owner).cancelUpkeep(upkeepId)
- const receipt = await tx.wait()
- await expect(tx)
- .to.emit(registry, 'UpkeepCanceled')
- .withArgs(upkeepId, BigNumber.from(receipt.blockNumber))
- })
-
- it('immediately prevents upkeep', async () => {
- await registry.connect(owner).cancelUpkeep(upkeepId)
-
- const tx = await getTransmitTx(registry, keeper1, [upkeepId])
- const receipt = await tx.wait()
- const cancelledUpkeepReportLogs =
- parseCancelledUpkeepReportLogs(receipt)
- // exactly 1 CancelledUpkeepReport log should be emitted
- assert.equal(cancelledUpkeepReportLogs.length, 1)
- })
-
- it('reverts if called multiple times', async () => {
- await registry.connect(owner).cancelUpkeep(upkeepId)
- await evmRevertCustomError(
- registry.connect(owner).cancelUpkeep(upkeepId),
- registry,
- 'UpkeepCancelled',
- )
- })
-
- describe('when called by the owner when the admin has just canceled', () => {
- // eslint-disable-next-line
@typescript-eslint/no-unused-vars - let oldExpiration: BigNumber - - beforeEach(async () => { - await registry.connect(admin).cancelUpkeep(upkeepId) - const registration = await registry.getUpkeep(upkeepId) - oldExpiration = registration.maxValidBlocknumber - }) - - it('reverts with proper error', async () => { - await evmRevertCustomError( - registry.connect(owner).cancelUpkeep(upkeepId), - registry, - 'UpkeepCancelled', - ) - }) - }) - }) - - describe('when called by the admin', async () => { - it('reverts if called again by the admin', async () => { - await registry.connect(admin).cancelUpkeep(upkeepId) - - await evmRevertCustomError( - registry.connect(admin).cancelUpkeep(upkeepId), - registry, - 'UpkeepCancelled', - ) - }) - - it('reverts if called by the owner after the timeout', async () => { - await registry.connect(admin).cancelUpkeep(upkeepId) - - for (let i = 0; i < cancellationDelay; i++) { - await ethers.provider.send('evm_mine', []) - } - - await evmRevertCustomError( - registry.connect(owner).cancelUpkeep(upkeepId), - registry, - 'UpkeepCancelled', - ) - }) - - it('sets the registration to invalid in 50 blocks', async () => { - const tx = await registry.connect(admin).cancelUpkeep(upkeepId) - const receipt = await tx.wait() - const registration = await registry.getUpkeep(upkeepId) - assert.equal( - registration.maxValidBlocknumber.toNumber(), - receipt.blockNumber + 50, - ) - }) - - it('emits an event', async () => { - const tx = await registry.connect(admin).cancelUpkeep(upkeepId) - const receipt = await tx.wait() - await expect(tx) - .to.emit(registry, 'UpkeepCanceled') - .withArgs( - upkeepId, - BigNumber.from(receipt.blockNumber + cancellationDelay), - ) - }) - - it('immediately prevents upkeep', async () => { - await linkToken.connect(owner).approve(registry.address, toWei('100')) - await registry.connect(owner).addFunds(upkeepId, toWei('100')) - await registry.connect(admin).cancelUpkeep(upkeepId) - - await getTransmitTx(registry, keeper1, [upkeepId]) - - for (let i = 0; i < cancellationDelay; i++) { - await ethers.provider.send('evm_mine', []) - } - - const tx = await getTransmitTx(registry, keeper1, [upkeepId]) - - const receipt = await tx.wait() - const cancelledUpkeepReportLogs = - parseCancelledUpkeepReportLogs(receipt) - // exactly 1 CancelledUpkeepReport log should be emitted - assert.equal(cancelledUpkeepReportLogs.length, 1) - }) - - describeMaybe('when an upkeep has been performed', async () => { - beforeEach(async () => { - await linkToken.connect(owner).approve(registry.address, toWei('100')) - await registry.connect(owner).addFunds(upkeepId, toWei('100')) - await getTransmitTx(registry, keeper1, [upkeepId]) - }) - - it('deducts a cancellation fee from the upkeep and gives to owner', async () => { - const minUpkeepSpend = toWei('10') - - await registry.connect(owner).setConfigTypeSafe( - signerAddresses, - keeperAddresses, - f, - { - paymentPremiumPPB, - flatFeeMicroLink, - checkGasLimit, - stalenessSeconds, - gasCeilingMultiplier, - minUpkeepSpend, - maxCheckDataSize, - maxPerformDataSize, - maxRevertDataSize, - maxPerformGas, - fallbackGasPrice, - fallbackLinkPrice, - transcoder: transcoder.address, - registrars: [], - upkeepPrivilegeManager: upkeepManager, - chainModule: chainModuleBase.address, - reorgProtectionEnabled: true, - }, - offchainVersion, - offchainBytes, - ) - - const payee1Before = await linkToken.balanceOf( - await payee1.getAddress(), - ) - const upkeepBefore = (await registry.getUpkeep(upkeepId)).balance - const ownerBefore = (await 
registry.getState()).state.ownerLinkBalance - - const amountSpent = toWei('100').sub(upkeepBefore) - const cancellationFee = minUpkeepSpend.sub(amountSpent) - - await registry.connect(admin).cancelUpkeep(upkeepId) - - const payee1After = await linkToken.balanceOf( - await payee1.getAddress(), - ) - const upkeepAfter = (await registry.getUpkeep(upkeepId)).balance - const ownerAfter = (await registry.getState()).state.ownerLinkBalance - - // post upkeep balance should be previous balance minus cancellation fee - assert.isTrue(upkeepBefore.sub(cancellationFee).eq(upkeepAfter)) - // payee balance should not change - assert.isTrue(payee1Before.eq(payee1After)) - // owner should receive the cancellation fee - assert.isTrue(ownerAfter.sub(ownerBefore).eq(cancellationFee)) - }) - - it('deducts up to balance as cancellation fee', async () => { - // Very high min spend, should deduct whole balance as cancellation fees - const minUpkeepSpend = toWei('1000') - await registry.connect(owner).setConfigTypeSafe( - signerAddresses, - keeperAddresses, - f, - { - paymentPremiumPPB, - flatFeeMicroLink, - checkGasLimit, - stalenessSeconds, - gasCeilingMultiplier, - minUpkeepSpend, - maxCheckDataSize, - maxPerformDataSize, - maxRevertDataSize, - maxPerformGas, - fallbackGasPrice, - fallbackLinkPrice, - transcoder: transcoder.address, - registrars: [], - upkeepPrivilegeManager: upkeepManager, - chainModule: chainModuleBase.address, - reorgProtectionEnabled: true, - }, - offchainVersion, - offchainBytes, - ) - const payee1Before = await linkToken.balanceOf( - await payee1.getAddress(), - ) - const upkeepBefore = (await registry.getUpkeep(upkeepId)).balance - const ownerBefore = (await registry.getState()).state.ownerLinkBalance - - await registry.connect(admin).cancelUpkeep(upkeepId) - const payee1After = await linkToken.balanceOf( - await payee1.getAddress(), - ) - const ownerAfter = (await registry.getState()).state.ownerLinkBalance - const upkeepAfter = (await registry.getUpkeep(upkeepId)).balance - - // all upkeep balance is deducted for cancellation fee - assert.equal(0, upkeepAfter.toNumber()) - // payee balance should not change - assert.isTrue(payee1After.eq(payee1Before)) - // all upkeep balance is transferred to the owner - assert.isTrue(ownerAfter.sub(ownerBefore).eq(upkeepBefore)) - }) - - it('does not deduct cancellation fee if more than minUpkeepSpend is spent', async () => { - // Very low min spend, already spent in one perform upkeep - const minUpkeepSpend = BigNumber.from(420) - await registry.connect(owner).setConfigTypeSafe( - signerAddresses, - keeperAddresses, - f, - { - paymentPremiumPPB, - flatFeeMicroLink, - checkGasLimit, - stalenessSeconds, - gasCeilingMultiplier, - minUpkeepSpend, - maxCheckDataSize, - maxPerformDataSize, - maxRevertDataSize, - maxPerformGas, - fallbackGasPrice, - fallbackLinkPrice, - transcoder: transcoder.address, - registrars: [], - upkeepPrivilegeManager: upkeepManager, - chainModule: chainModuleBase.address, - reorgProtectionEnabled: true, - }, - offchainVersion, - offchainBytes, - ) - const payee1Before = await linkToken.balanceOf( - await payee1.getAddress(), - ) - const upkeepBefore = (await registry.getUpkeep(upkeepId)).balance - const ownerBefore = (await registry.getState()).state.ownerLinkBalance - - await registry.connect(admin).cancelUpkeep(upkeepId) - const payee1After = await linkToken.balanceOf( - await payee1.getAddress(), - ) - const ownerAfter = (await registry.getState()).state.ownerLinkBalance - const upkeepAfter = (await 
registry.getUpkeep(upkeepId)).balance - - // upkeep does not pay cancellation fee after cancellation because minimum upkeep spent is met - assert.isTrue(upkeepBefore.eq(upkeepAfter)) - // owner balance does not change - assert.isTrue(ownerAfter.eq(ownerBefore)) - // payee balance does not change - assert.isTrue(payee1Before.eq(payee1After)) - }) - }) - }) - }) - - describe('#withdrawPayment', () => { - beforeEach(async () => { - await linkToken.connect(owner).approve(registry.address, toWei('100')) - await registry.connect(owner).addFunds(upkeepId, toWei('100')) - await getTransmitTx(registry, keeper1, [upkeepId]) - }) - - it('reverts if called by anyone but the payee', async () => { - await evmRevertCustomError( - registry - .connect(payee2) - .withdrawPayment( - await keeper1.getAddress(), - await nonkeeper.getAddress(), - ), - registry, - 'OnlyCallableByPayee', - ) - }) - - it('reverts if called with the 0 address', async () => { - await evmRevertCustomError( - registry - .connect(payee2) - .withdrawPayment(await keeper1.getAddress(), zeroAddress), - registry, - 'InvalidRecipient', - ) - }) - - it('updates the balances', async () => { - const to = await nonkeeper.getAddress() - const keeperBefore = await registry.getTransmitterInfo( - await keeper1.getAddress(), - ) - const registrationBefore = (await registry.getUpkeep(upkeepId)).balance - const toLinkBefore = await linkToken.balanceOf(to) - const registryLinkBefore = await linkToken.balanceOf(registry.address) - const registryPremiumBefore = (await registry.getState()).state - .totalPremium - const ownerBefore = (await registry.getState()).state.ownerLinkBalance - - // Withdrawing for first time, last collected = 0 - assert.equal(keeperBefore.lastCollected.toString(), '0') - - //// Do the thing - await registry - .connect(payee1) - .withdrawPayment(await keeper1.getAddress(), to) - - const keeperAfter = await registry.getTransmitterInfo( - await keeper1.getAddress(), - ) - const registrationAfter = (await registry.getUpkeep(upkeepId)).balance - const toLinkAfter = await linkToken.balanceOf(to) - const registryLinkAfter = await linkToken.balanceOf(registry.address) - const registryPremiumAfter = (await registry.getState()).state - .totalPremium - const ownerAfter = (await registry.getState()).state.ownerLinkBalance - - // registry total premium should not change - assert.isTrue(registryPremiumBefore.eq(registryPremiumAfter)) - - // Last collected should be updated to premium-change - assert.isTrue( - keeperAfter.lastCollected.eq( - registryPremiumBefore.sub( - registryPremiumBefore.mod(keeperAddresses.length), - ), - ), - ) - - // owner balance should remain unchanged - assert.isTrue(ownerAfter.eq(ownerBefore)) - - assert.isTrue(keeperAfter.balance.eq(BigNumber.from(0))) - assert.isTrue(registrationBefore.eq(registrationAfter)) - assert.isTrue(toLinkBefore.add(keeperBefore.balance).eq(toLinkAfter)) - assert.isTrue( - registryLinkBefore.sub(keeperBefore.balance).eq(registryLinkAfter), - ) - }) - - it('emits a log announcing the withdrawal', async () => { - const balance = ( - await registry.getTransmitterInfo(await keeper1.getAddress()) - ).balance - const tx = await registry - .connect(payee1) - .withdrawPayment( - await keeper1.getAddress(), - await nonkeeper.getAddress(), - ) - await expect(tx) - .to.emit(registry, 'PaymentWithdrawn') - .withArgs( - await keeper1.getAddress(), - balance, - await nonkeeper.getAddress(), - await payee1.getAddress(), - ) - }) - }) - - describe('#checkCallback', () => { - it('returns false with 
appropriate failure reason when target callback reverts', async () => { - await streamsLookupUpkeep.setShouldRevertCallback(true) - - const values: any[] = ['0x1234', '0xabcd'] - const res = await registry - .connect(zeroAddress) - .callStatic.checkCallback(streamsLookupUpkeepId, values, '0x') - - assert.isFalse(res.upkeepNeeded) - assert.equal(res.performData, '0x') - assert.equal( - res.upkeepFailureReason, - UpkeepFailureReason.CHECK_CALLBACK_REVERTED, - ) - assert.isTrue(res.gasUsed.gt(BigNumber.from('0'))) // Some gas should be used - }) - - it('returns false with appropriate failure reason when target callback returns big performData', async () => { - let longBytes = '0x' - for (let i = 0; i <= maxPerformDataSize.toNumber(); i++) { - longBytes += '11' - } - const values: any[] = [longBytes, longBytes] - const res = await registry - .connect(zeroAddress) - .callStatic.checkCallback(streamsLookupUpkeepId, values, '0x') - - assert.isFalse(res.upkeepNeeded) - assert.equal(res.performData, '0x') - assert.equal( - res.upkeepFailureReason, - UpkeepFailureReason.PERFORM_DATA_EXCEEDS_LIMIT, - ) - assert.isTrue(res.gasUsed.gt(BigNumber.from('0'))) // Some gas should be used - }) - - it('returns false with appropriate failure reason when target callback returns false', async () => { - await streamsLookupUpkeep.setCallbackReturnBool(false) - const values: any[] = ['0x1234', '0xabcd'] - const res = await registry - .connect(zeroAddress) - .callStatic.checkCallback(streamsLookupUpkeepId, values, '0x') - - assert.isFalse(res.upkeepNeeded) - assert.equal(res.performData, '0x') - assert.equal( - res.upkeepFailureReason, - UpkeepFailureReason.UPKEEP_NOT_NEEDED, - ) - assert.isTrue(res.gasUsed.gt(BigNumber.from('0'))) // Some gas should be used - }) - - it('succeeds with upkeep needed', async () => { - const values: any[] = ['0x1234', '0xabcd'] - - const res = await registry - .connect(zeroAddress) - .callStatic.checkCallback(streamsLookupUpkeepId, values, '0x') - const expectedPerformData = ethers.utils.defaultAbiCoder.encode( - ['bytes[]', 'bytes'], - [values, '0x'], - ) - - assert.isTrue(res.upkeepNeeded) - assert.equal(res.performData, expectedPerformData) - assert.equal(res.upkeepFailureReason, UpkeepFailureReason.NONE) - assert.isTrue(res.gasUsed.gt(BigNumber.from('0'))) // Some gas should be used - }) - }) - - describe('#setUpkeepPrivilegeConfig() / #getUpkeepPrivilegeConfig()', () => { - it('reverts when non manager tries to set privilege config', async () => { - await evmRevertCustomError( - registry.connect(payee3).setUpkeepPrivilegeConfig(upkeepId, '0x1234'), - registry, - 'OnlyCallableByUpkeepPrivilegeManager', - ) - }) - - it('returns empty bytes for upkeep privilege config before setting', async () => { - const cfg = await registry.getUpkeepPrivilegeConfig(upkeepId) - assert.equal(cfg, '0x') - }) - - it('allows upkeep manager to set privilege config', async () => { - const tx = await registry - .connect(personas.Norbert) - .setUpkeepPrivilegeConfig(upkeepId, '0x1234') - await expect(tx) - .to.emit(registry, 'UpkeepPrivilegeConfigSet') - .withArgs(upkeepId, '0x1234') - - const cfg = await registry.getUpkeepPrivilegeConfig(upkeepId) - assert.equal(cfg, '0x1234') - }) - }) - - describe('#setAdminPrivilegeConfig() / #getAdminPrivilegeConfig()', () => { - const admin = randomAddress() - - it('reverts when non manager tries to set privilege config', async () => { - await evmRevertCustomError( - registry.connect(payee3).setAdminPrivilegeConfig(admin, '0x1234'), - registry, - 
'OnlyCallableByUpkeepPrivilegeManager', - ) - }) - - it('returns empty bytes for upkeep privilege config before setting', async () => { - const cfg = await registry.getAdminPrivilegeConfig(admin) - assert.equal(cfg, '0x') - }) - - it('allows upkeep manager to set privilege config', async () => { - const tx = await registry - .connect(personas.Norbert) - .setAdminPrivilegeConfig(admin, '0x1234') - await expect(tx) - .to.emit(registry, 'AdminPrivilegeConfigSet') - .withArgs(admin, '0x1234') - - const cfg = await registry.getAdminPrivilegeConfig(admin) - assert.equal(cfg, '0x1234') - }) - }) - - describe('transmitterPremiumSplit [ @skip-coverage ]', () => { - beforeEach(async () => { - await linkToken.connect(owner).approve(registry.address, toWei('100')) - await registry.connect(owner).addFunds(upkeepId, toWei('100')) - }) - - it('splits premium evenly across transmitters', async () => { - // Do a transmit from keeper1 - await getTransmitTx(registry, keeper1, [upkeepId]) - - const registryPremium = (await registry.getState()).state.totalPremium - assert.isTrue(registryPremium.gt(BigNumber.from(0))) - - const premiumPerTransmitter = registryPremium.div( - BigNumber.from(keeperAddresses.length), - ) - const k1Balance = ( - await registry.getTransmitterInfo(await keeper1.getAddress()) - ).balance - // transmitter should be reimbursed for gas and get the premium - assert.isTrue(k1Balance.gt(premiumPerTransmitter)) - const k1GasReimbursement = k1Balance.sub(premiumPerTransmitter) - - const k2Balance = ( - await registry.getTransmitterInfo(await keeper2.getAddress()) - ).balance - // non transmitter should get its share of premium - assert.isTrue(k2Balance.eq(premiumPerTransmitter)) - - // Now do a transmit from keeper 2 - await getTransmitTx(registry, keeper2, [upkeepId]) - const registryPremiumNew = (await registry.getState()).state.totalPremium - assert.isTrue(registryPremiumNew.gt(registryPremium)) - const premiumPerTransmitterNew = registryPremiumNew.div( - BigNumber.from(keeperAddresses.length), - ) - const additionalPremium = premiumPerTransmitterNew.sub( - premiumPerTransmitter, - ) - - const k1BalanceNew = ( - await registry.getTransmitterInfo(await keeper1.getAddress()) - ).balance - // k1 should get the new premium - assert.isTrue( - k1BalanceNew.eq(k1GasReimbursement.add(premiumPerTransmitterNew)), - ) - - const k2BalanceNew = ( - await registry.getTransmitterInfo(await keeper2.getAddress()) - ).balance - // k2 should get gas reimbursement in addition to new premium - assert.isTrue(k2BalanceNew.gt(k2Balance.add(additionalPremium))) - }) - - it('updates last collected upon payment withdrawn', async () => { - // Do a transmit from keeper1 - await getTransmitTx(registry, keeper1, [upkeepId]) - - const registryPremium = (await registry.getState()).state.totalPremium - const k1 = await registry.getTransmitterInfo(await keeper1.getAddress()) - const k2 = await registry.getTransmitterInfo(await keeper2.getAddress()) - - // Withdrawing for first time, last collected = 0 - assert.isTrue(k1.lastCollected.eq(BigNumber.from(0))) - assert.isTrue(k2.lastCollected.eq(BigNumber.from(0))) - - //// Do the thing - await registry - .connect(payee1) - .withdrawPayment( - await keeper1.getAddress(), - await nonkeeper.getAddress(), - ) - - const k1New = await registry.getTransmitterInfo( - await keeper1.getAddress(), - ) - const k2New = await registry.getTransmitterInfo( - await keeper2.getAddress(), - ) - - // transmitter info lastCollected should be updated for k1, not for k2 - assert.isTrue( - 
k1New.lastCollected.eq( - registryPremium.sub(registryPremium.mod(keeperAddresses.length)), - ), - ) - assert.isTrue(k2New.lastCollected.eq(BigNumber.from(0))) - }) - - itMaybe( - 'maintains consistent balance information across all parties', - async () => { - // throughout transmits, withdrawals, setConfigs total claim on balances should remain less than expected balance - // some spare change can get lost but it should be less than maxAllowedSpareChange - - let maxAllowedSpareChange = BigNumber.from('0') - await verifyConsistentAccounting(maxAllowedSpareChange) - - await getTransmitTx(registry, keeper1, [upkeepId]) - maxAllowedSpareChange = maxAllowedSpareChange.add(BigNumber.from('31')) - await verifyConsistentAccounting(maxAllowedSpareChange) - - await registry - .connect(payee1) - .withdrawPayment( - await keeper1.getAddress(), - await nonkeeper.getAddress(), - ) - await verifyConsistentAccounting(maxAllowedSpareChange) - - await registry - .connect(payee2) - .withdrawPayment( - await keeper2.getAddress(), - await nonkeeper.getAddress(), - ) - await verifyConsistentAccounting(maxAllowedSpareChange) - - await getTransmitTx(registry, keeper1, [upkeepId]) - maxAllowedSpareChange = maxAllowedSpareChange.add(BigNumber.from('31')) - await verifyConsistentAccounting(maxAllowedSpareChange) - - await registry.connect(owner).setConfigTypeSafe( - signerAddresses.slice(2, 15), // only use 2-14th index keepers - keeperAddresses.slice(2, 15), - f, - config, - offchainVersion, - offchainBytes, - ) - await verifyConsistentAccounting(maxAllowedSpareChange) - - await getTransmitTx(registry, keeper3, [upkeepId], { - startingSignerIndex: 2, - }) - maxAllowedSpareChange = maxAllowedSpareChange.add(BigNumber.from('13')) - await verifyConsistentAccounting(maxAllowedSpareChange) - - await registry - .connect(payee1) - .withdrawPayment( - await keeper1.getAddress(), - await nonkeeper.getAddress(), - ) - await verifyConsistentAccounting(maxAllowedSpareChange) - - await registry - .connect(payee3) - .withdrawPayment( - await keeper3.getAddress(), - await nonkeeper.getAddress(), - ) - await verifyConsistentAccounting(maxAllowedSpareChange) - - await registry.connect(owner).setConfigTypeSafe( - signerAddresses.slice(0, 4), // only use 0-3rd index keepers - keeperAddresses.slice(0, 4), - f, - config, - offchainVersion, - offchainBytes, - ) - await verifyConsistentAccounting(maxAllowedSpareChange) - await getTransmitTx(registry, keeper1, [upkeepId]) - maxAllowedSpareChange = maxAllowedSpareChange.add(BigNumber.from('4')) - await getTransmitTx(registry, keeper3, [upkeepId]) - maxAllowedSpareChange = maxAllowedSpareChange.add(BigNumber.from('4')) - - await verifyConsistentAccounting(maxAllowedSpareChange) - await registry - .connect(payee5) - .withdrawPayment( - await keeper5.getAddress(), - await nonkeeper.getAddress(), - ) - await verifyConsistentAccounting(maxAllowedSpareChange) - - await registry - .connect(payee1) - .withdrawPayment( - await keeper1.getAddress(), - await nonkeeper.getAddress(), - ) - await verifyConsistentAccounting(maxAllowedSpareChange) - }, - ) - }) -}) diff --git a/contracts/test/v0.8/automation/CronUpkeep.test.ts b/contracts/test/v0.8/automation/CronUpkeep.test.ts deleted file mode 100644 index 7b769797f12..00000000000 --- a/contracts/test/v0.8/automation/CronUpkeep.test.ts +++ /dev/null @@ -1,576 +0,0 @@ -import moment from 'moment' -import { ethers } from 'hardhat' -import { Contract } from 'ethers' -import { assert, expect } from 'chai' -import { CronUpkeepTestHelper } from 
'../../../typechain/CronUpkeepTestHelper' -import { CronUpkeepDelegate } from '../../../typechain/CronUpkeepDelegate' -import { CronUpkeepFactory } from '../../../typechain/CronUpkeepFactory' -import { CronUpkeepTestHelper__factory as CronUpkeepTestHelperFactory } from '../../../typechain/factories/CronUpkeepTestHelper__factory' -import { CronInternalTestHelper } from '../../../typechain/CronInternalTestHelper' -import { CronReceiver } from '../../../typechain/CronReceiver' -import { BigNumber, BigNumberish } from '@ethersproject/bignumber' -import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' -import { validCrons } from '../../test-helpers/fixtures' -import * as h from '../../test-helpers/helpers' - -const { utils } = ethers -const { AddressZero } = ethers.constants - -const OWNABLE_ERR = 'Only callable by owner' -const CRON_NOT_FOUND_ERR = 'CronJobIDNotFound' - -let cron: CronUpkeepTestHelper -let cronFactory: CronUpkeepTestHelperFactory // the typechain factory that deploys cron upkeep contracts -let cronFactoryContract: CronUpkeepFactory // the cron factory contract -let cronDelegate: CronUpkeepDelegate -let cronTestHelper: CronInternalTestHelper -let cronReceiver1: CronReceiver -let cronReceiver2: CronReceiver - -let admin: SignerWithAddress -let owner: SignerWithAddress -let stranger: SignerWithAddress - -const timeStamp = 32503680000 // Jan 1, 3000 12:00AM -const basicCronString = '0 * * * *' - -let handler1Sig: string -let handler2Sig: string -let revertHandlerSig: string -let basicSpec: string - -async function assertJobIDsEqual(expected: number[]) { - const ids = (await cron.getActiveCronJobIDs()).map((n) => n.toNumber()) - assert.deepEqual(ids.sort(), expected.sort()) -} - -function decodePayload(payload: string) { - return utils.defaultAbiCoder.decode( - ['uint256', 'uint256', 'address', 'bytes'], - payload, - ) as [BigNumber, BigNumber, string, string] -} - -function encodePayload(payload: [BigNumberish, BigNumberish, string, string]) { - return utils.defaultAbiCoder.encode( - ['uint256', 'uint256', 'address', 'bytes'], - payload, - ) -} - -async function createBasicCron() { - return await cron.createCronJobFromEncodedSpec( - cronReceiver1.address, - handler1Sig, - basicSpec, - ) -} - -describe('CronUpkeep', () => { - beforeEach(async () => { - const accounts = await ethers.getSigners() - admin = accounts[0] - owner = accounts[1] - stranger = accounts[2] - const crFactory = await ethers.getContractFactory('CronReceiver', owner) - cronReceiver1 = await crFactory.deploy() - cronReceiver2 = await crFactory.deploy() - const cronDelegateFactory = await ethers.getContractFactory( - 'CronUpkeepDelegate', - admin, - ) - cronDelegate = await cronDelegateFactory.deploy() - const cronExternalFactory = await ethers.getContractFactory( - 'src/v0.8/automation/libraries/external/Cron.sol:Cron', - admin, - ) - const cronExternalLib = await cronExternalFactory.deploy() - cronFactory = await ethers.getContractFactory('CronUpkeepTestHelper', { - signer: admin, - libraries: { Cron: cronExternalLib.address }, - }) - cron = ( - await cronFactory.deploy(owner.address, cronDelegate.address, 5, []) - ).connect(owner) - const cronFactoryContractFactory = await ethers.getContractFactory( - 'CronUpkeepFactory', - { signer: admin, libraries: { Cron: cronExternalLib.address } }, - ) // the typechain factory that creates the cron factory contract - cronFactoryContract = await cronFactoryContractFactory.deploy() - const fs = cronReceiver1.interface.functions - handler1Sig = 
utils.id(fs['handler1()'].format('sighash')).slice(0, 10) - handler2Sig = utils.id(fs['handler2()'].format('sighash')).slice(0, 10) - revertHandlerSig = utils - .id(fs['revertHandler()'].format('sighash')) - .slice(0, 10) - const cronTHFactory = await ethers.getContractFactory( - 'CronInternalTestHelper', - ) - cronTestHelper = await cronTHFactory.deploy() - basicSpec = await cronFactoryContract.encodeCronString(basicCronString) - }) - - afterEach(async () => { - await h.reset() - }) - - it('has a limited public ABI [ @skip-coverage ]', () => { - // Casting cron is necessary due to a tricky versioning mismatch issue, likely between ethers - // and typechain. Remove once the version issue is resolved. - // https://smartcontract-it.atlassian.net/browse/ARCHIVE-22094 - h.publicAbi(cron as unknown as Contract, [ - 's_maxJobs', - 'performUpkeep', - 'createCronJobFromEncodedSpec', - 'updateCronJob', - 'deleteCronJob', - 'checkUpkeep', - 'getActiveCronJobIDs', - 'getCronJob', - // Ownable methods: - 'acceptOwnership', - 'owner', - 'transferOwnership', - // Pausable methods - 'paused', - 'pause', - 'unpause', - // Cron helper methods - 'createCronJobFromString', - 'txCheckUpkeep', - ]) - }) - - describe('constructor()', () => { - it('sets the initial values', async () => { - expect(await cron.owner()).to.equal(owner.address) - expect(await cron.s_maxJobs()).to.equal(5) - }) - - it('optionally creates a first job', async () => { - const payload = await cronFactoryContract.encodeCronJob( - cronReceiver1.address, - handler1Sig, - basicCronString, - ) - cron = ( - await cronFactory.deploy( - owner.address, - cronDelegate.address, - 5, - payload, - ) - ).connect(owner) - const job = await cron.getCronJob(1) - assert.equal(job.target, cronReceiver1.address) - assert.equal(job.handler, handler1Sig) - assert.equal(job.cronString, basicCronString) - }) - }) - - describe('checkUpkeep() / performUpkeep()', () => { - beforeEach(async () => { - await h.setTimestamp(timeStamp) - // id 1 - await cron.createCronJobFromString( - cronReceiver1.address, - handler1Sig, - '0 0 31 * *', // 31st day of every month - ) - // id 2 - await cron.createCronJobFromString( - cronReceiver1.address, - handler2Sig, - '10 * * * *', // on the 10 min mark - ) - // id 3 - await cron.createCronJobFromString( - cronReceiver2.address, - handler1Sig, - '0 0 * 7 *', // every day in July - ) - // id 4 - await cron.createCronJobFromString( - cronReceiver2.address, - revertHandlerSig, - '20 * * * *', // on the 20 min mark - ) - }) - - describe('checkUpkeep()', () => { - it('returns false if no one is elligible', async () => { - const [needsUpkeep] = await cron - .connect(AddressZero) - .callStatic.checkUpkeep('0x') - assert.isFalse(needsUpkeep) - }) - - it('returns the id of eligible cron jobs', async () => { - await h.fastForward(moment.duration(11, 'minutes').asSeconds()) - const [needsUpkeep, payload] = await cron - .connect(AddressZero) - .callStatic.checkUpkeep('0x') - assert.isTrue(needsUpkeep) - const [id, ..._] = decodePayload(payload) - assert.equal(id.toNumber(), 2) - }) - - describe('when mutiple crons are elligible', () => { - it('cycles through the cron IDs based on block number', async () => { - await h.fastForward(moment.duration(1, 'year').asSeconds()) - let [_, payload] = await cron - .connect(AddressZero) - .callStatic.checkUpkeep('0x') - const [id1] = decodePayload(payload) - await h.mineBlock(ethers.provider) - ;[_, payload] = await cron - .connect(AddressZero) - .callStatic.checkUpkeep('0x') - const [id2] = 
decodePayload(payload) - await h.mineBlock(ethers.provider) - ;[_, payload] = await cron - .connect(AddressZero) - .callStatic.checkUpkeep('0x') - const [id3] = decodePayload(payload) - await h.mineBlock(ethers.provider) - ;[_, payload] = await cron - .connect(AddressZero) - .callStatic.checkUpkeep('0x') - const [id4] = decodePayload(payload) - assert.deepEqual( - [id1, id2, id3, id4].map((n) => n.toNumber()).sort(), - [1, 2, 3, 4], - ) - }) - }) - }) - - describe('performUpkeep()', () => { - it('forwards the call to the appropriate target/handler', async () => { - await h.fastForward(moment.duration(11, 'minutes').asSeconds()) - const [needsUpkeep, payload] = await cron - .connect(AddressZero) - .callStatic.checkUpkeep('0x') - assert.isTrue(needsUpkeep) - await expect(cron.performUpkeep(payload)).to.emit( - cronReceiver1, - 'Received2', - ) - }) - - it('emits an event', async () => { - await h.fastForward(moment.duration(11, 'minutes').asSeconds()) - const [needsUpkeep, payload] = await cron - .connect(AddressZero) - .callStatic.checkUpkeep('0x') - assert.isTrue(needsUpkeep) - await expect(cron.performUpkeep(payload)) - .to.emit(cron, 'CronJobExecuted') - .withArgs(2, true) - }) - - it('succeeds even if the call to the target fails', async () => { - await cron.deleteCronJob(2) - await h.fastForward(moment.duration(21, 'minutes').asSeconds()) - const payload = encodePayload([ - 4, - moment.unix(timeStamp).add(20, 'minutes').unix(), - cronReceiver2.address, - revertHandlerSig, - ]) - await expect(cron.performUpkeep(payload)) - .to.emit(cron, 'CronJobExecuted') - .withArgs(4, false) - }) - - it('is only callable by anyone', async () => { - await h.fastForward(moment.duration(11, 'minutes').asSeconds()) - const [needsUpkeep, payload] = await cron - .connect(AddressZero) - .callStatic.checkUpkeep('0x') - assert.isTrue(needsUpkeep) - await cron.connect(stranger).performUpkeep(payload) - }) - - it('is only callable once for a given tick', async () => { - await h.fastForward(moment.duration(10, 'minutes').asSeconds()) - const [needsUpkeep, payload] = await cron - .connect(AddressZero) - .callStatic.checkUpkeep('0x') - assert.isTrue(needsUpkeep) - const maliciousPayload = encodePayload([ - 2, - moment.unix(timeStamp).add(10, 'minutes').add(59, 'seconds').unix(), - cronReceiver1.address, - handler2Sig, - ]) - await cron.performUpkeep(payload) - await expect(cron.performUpkeep(payload)).to.be.reverted - await expect(cron.performUpkeep(maliciousPayload)).to.be.reverted - await h.fastForward(moment.duration(1, 'minute').asSeconds()) - await expect(cron.performUpkeep(payload)).to.be.reverted - await expect(cron.performUpkeep(maliciousPayload)).to.be.reverted - await h.fastForward(moment.duration(10, 'minute').asSeconds()) - await expect(cron.performUpkeep(payload)).to.be.reverted - await expect(cron.performUpkeep(maliciousPayload)).to.be.reverted - }) - }) - }) - - describe('createCronJobFromEncodedSpec()', () => { - it('creates jobs with sequential IDs', async () => { - const cronString1 = '0 * * * *' - const cronString2 = '0 1,2,3 */4 5-6 1-2' - const encodedSpec1 = - await cronFactoryContract.encodeCronString(cronString1) - const encodedSpec2 = - await cronFactoryContract.encodeCronString(cronString2) - const nextTick1 = ( - await cronTestHelper.calculateNextTick(cronString1) - ).toNumber() - const nextTick2 = ( - await cronTestHelper.calculateNextTick(cronString2) - ).toNumber() - await cron.createCronJobFromEncodedSpec( - cronReceiver1.address, - handler1Sig, - encodedSpec1, - ) - await 
assertJobIDsEqual([1]) - await cron.createCronJobFromEncodedSpec( - cronReceiver1.address, - handler2Sig, - encodedSpec1, - ) - await assertJobIDsEqual([1, 2]) - await cron.createCronJobFromEncodedSpec( - cronReceiver2.address, - handler1Sig, - encodedSpec2, - ) - await assertJobIDsEqual([1, 2, 3]) - await cron.createCronJobFromEncodedSpec( - cronReceiver2.address, - handler2Sig, - encodedSpec2, - ) - await assertJobIDsEqual([1, 2, 3, 4]) - const cron1 = await cron.getCronJob(1) - const cron2 = await cron.getCronJob(2) - const cron3 = await cron.getCronJob(3) - const cron4 = await cron.getCronJob(4) - assert.equal(cron1.target, cronReceiver1.address) - assert.equal(cron1.handler, handler1Sig) - assert.equal(cron1.cronString, cronString1) - assert.equal(cron1.nextTick.toNumber(), nextTick1) - assert.equal(cron2.target, cronReceiver1.address) - assert.equal(cron2.handler, handler2Sig) - assert.equal(cron2.cronString, cronString1) - assert.equal(cron2.nextTick.toNumber(), nextTick1) - assert.equal(cron3.target, cronReceiver2.address) - assert.equal(cron3.handler, handler1Sig) - assert.equal(cron3.cronString, cronString2) - assert.equal(cron3.nextTick.toNumber(), nextTick2) - assert.equal(cron4.target, cronReceiver2.address) - assert.equal(cron4.handler, handler2Sig) - assert.equal(cron4.cronString, cronString2) - assert.equal(cron4.nextTick.toNumber(), nextTick2) - }) - - it('emits an event', async () => { - await expect(createBasicCron()).to.emit(cron, 'CronJobCreated') - }) - - it('is only callable by the owner', async () => { - await expect( - cron - .connect(stranger) - .createCronJobFromEncodedSpec( - cronReceiver1.address, - handler1Sig, - basicSpec, - ), - ).to.be.revertedWith(OWNABLE_ERR) - }) - - it('errors if trying to create more jobs than allowed', async () => { - for (let idx = 0; idx < 5; idx++) { - await createBasicCron() - } - await expect(createBasicCron()).to.be.revertedWithCustomError( - cron, - 'ExceedsMaxJobs', - ) - }) - }) - - describe('updateCronJob()', () => { - const newCronString = '0 0 1 1 1' - let newEncodedSpec: string - beforeEach(async () => { - await createBasicCron() - newEncodedSpec = await cronFactoryContract.encodeCronString(newCronString) - }) - - it('updates a cron job', async () => { - let cron1 = await cron.getCronJob(1) - assert.equal(cron1.target, cronReceiver1.address) - assert.equal(cron1.handler, handler1Sig) - assert.equal(cron1.cronString, basicCronString) - await cron.updateCronJob( - 1, - cronReceiver2.address, - handler2Sig, - newEncodedSpec, - ) - cron1 = await cron.getCronJob(1) - assert.equal(cron1.target, cronReceiver2.address) - assert.equal(cron1.handler, handler2Sig) - assert.equal(cron1.cronString, newCronString) - }) - - it('emits an event', async () => { - await expect( - await cron.updateCronJob( - 1, - cronReceiver2.address, - handler2Sig, - newEncodedSpec, - ), - ).to.emit(cron, 'CronJobUpdated') - }) - - it('is only callable by the owner', async () => { - await expect( - cron - .connect(stranger) - .updateCronJob(1, cronReceiver2.address, handler2Sig, newEncodedSpec), - ).to.be.revertedWith(OWNABLE_ERR) - }) - - it('reverts if trying to update a non-existent ID', async () => { - await expect( - cron.updateCronJob( - 2, - cronReceiver2.address, - handler2Sig, - newEncodedSpec, - ), - ).to.be.revertedWithCustomError(cron, CRON_NOT_FOUND_ERR) - }) - }) - - describe('deleteCronJob()', () => { - it("deletes a jobs by it's ID", async () => { - await createBasicCron() - await createBasicCron() - await createBasicCron() - await 
createBasicCron() - await assertJobIDsEqual([1, 2, 3, 4]) - await cron.deleteCronJob(2) - await expect(cron.getCronJob(2)).to.be.revertedWithCustomError( - cron, - CRON_NOT_FOUND_ERR, - ) - await expect(cron.deleteCronJob(2)).to.be.revertedWithCustomError( - cron, - CRON_NOT_FOUND_ERR, - ) - await assertJobIDsEqual([1, 3, 4]) - await cron.deleteCronJob(1) - await assertJobIDsEqual([3, 4]) - await cron.deleteCronJob(4) - await assertJobIDsEqual([3]) - await cron.deleteCronJob(3) - await assertJobIDsEqual([]) - }) - - it('emits an event', async () => { - await createBasicCron() - await expect(cron.deleteCronJob(1)).to.emit(cron, 'CronJobDeleted') - }) - - it('reverts if trying to delete a non-existent ID', async () => { - await createBasicCron() - await createBasicCron() - await expect(cron.deleteCronJob(0)).to.be.revertedWithCustomError( - cron, - CRON_NOT_FOUND_ERR, - ) - await expect(cron.deleteCronJob(3)).to.be.revertedWithCustomError( - cron, - CRON_NOT_FOUND_ERR, - ) - }) - }) - - describe('pause() / unpause()', () => { - it('is only callable by the owner', async () => { - await expect(cron.connect(stranger).pause()).to.be.reverted - await expect(cron.connect(stranger).unpause()).to.be.reverted - }) - - it('pauses / unpauses the contract', async () => { - expect(await cron.paused()).to.be.false - await cron.pause() - expect(await cron.paused()).to.be.true - await cron.unpause() - expect(await cron.paused()).to.be.false - }) - }) -}) - -// only run during pnpm test:gas -describe.skip('Cron Gas Usage', () => { - before(async () => { - const accounts = await ethers.getSigners() - admin = accounts[0] - owner = accounts[1] - const crFactory = await ethers.getContractFactory('CronReceiver', owner) - cronReceiver1 = await crFactory.deploy() - const cronDelegateFactory = await ethers.getContractFactory( - 'CronUpkeepDelegate', - owner, - ) - const cronDelegate = await cronDelegateFactory.deploy() - const cronExternalFactory = await ethers.getContractFactory( - 'src/v0.8/automation/libraries/external/Cron.sol:Cron', - admin, - ) - const cronExternalLib = await cronExternalFactory.deploy() - const cronFactory = await ethers.getContractFactory( - 'CronUpkeepTestHelper', - { - signer: owner, - libraries: { Cron: cronExternalLib.address }, - }, - ) - cron = await cronFactory.deploy(owner.address, cronDelegate.address, 5, []) - const fs = cronReceiver1.interface.functions - handler1Sig = utils - .id(fs['handler1()'].format('sighash')) // TODO this seems like an ethers bug - .slice(0, 10) - }) - - describe('checkUpkeep() / performUpkeep()', () => { - it('uses gas', async () => { - for (let idx = 0; idx < validCrons.length; idx++) { - const cronString = validCrons[idx] - const cronID = idx + 1 - await cron.createCronJobFromString( - cronReceiver1.address, - handler1Sig, - cronString, - ) - await h.fastForward(moment.duration(100, 'years').asSeconds()) // long enough that at least 1 tick occurs - const [needsUpkeep, data] = await cron - .connect(AddressZero) - .callStatic.checkUpkeep('0x') - assert.isTrue(needsUpkeep, `failed for cron string ${cronString}`) - await cron.txCheckUpkeep('0x') - await cron.performUpkeep(data) - await cron.deleteCronJob(cronID) - } - }) - }) -}) diff --git a/contracts/test/v0.8/automation/CronUpkeepFactory.test.ts b/contracts/test/v0.8/automation/CronUpkeepFactory.test.ts deleted file mode 100644 index e9a7de837b7..00000000000 --- a/contracts/test/v0.8/automation/CronUpkeepFactory.test.ts +++ /dev/null @@ -1,107 +0,0 @@ -import { ethers } from 'hardhat' -import { Contract } 
from 'ethers' -import { assert, expect } from 'chai' -import { CronUpkeepFactory } from '../../../typechain/CronUpkeepFactory' -import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' -import * as h from '../../test-helpers/helpers' -import { reset } from '../../test-helpers/helpers' - -const OWNABLE_ERR = 'Only callable by owner' - -let cronExternalLib: Contract -let factory: CronUpkeepFactory - -let admin: SignerWithAddress -let owner: SignerWithAddress -let stranger: SignerWithAddress - -describe('CronUpkeepFactory', () => { - beforeEach(async () => { - const accounts = await ethers.getSigners() - admin = accounts[0] - owner = accounts[1] - stranger = accounts[2] - const cronExternalFactory = await ethers.getContractFactory( - 'src/v0.8/automation/libraries/external/Cron.sol:Cron', - admin, - ) - cronExternalLib = await cronExternalFactory.deploy() - const cronUpkeepFactoryFactory = await ethers.getContractFactory( - 'CronUpkeepFactory', - { - signer: admin, - libraries: { - Cron: cronExternalLib.address, - }, - }, - ) - factory = await cronUpkeepFactoryFactory.deploy() - }) - - afterEach(async () => { - await reset() - }) - - it('has a limited public ABI [ @skip-coverage ]', () => { - h.publicAbi(factory as unknown as Contract, [ - 's_maxJobs', - 'newCronUpkeep', - 'newCronUpkeepWithJob', - 'setMaxJobs', - 'cronDelegateAddress', - 'encodeCronString', - 'encodeCronJob', - // Ownable methods: - 'acceptOwnership', - 'owner', - 'transferOwnership', - ]) - }) - - describe('constructor()', () => { - it('deploys a delegate contract', async () => { - assert.notEqual( - await factory.cronDelegateAddress(), - ethers.constants.AddressZero, - ) - }) - }) - - describe('newCronUpkeep()', () => { - it('emits an event', async () => { - await expect(factory.connect(owner).newCronUpkeep()).to.emit( - factory, - 'NewCronUpkeepCreated', - ) - }) - it('sets the deployer as the owner', async () => { - const response = await factory.connect(owner).newCronUpkeep() - const { events } = await response.wait() - if (!events) { - assert.fail('no events emitted') - } - const upkeepAddress = events[0].args?.upkeep - const cronUpkeepFactory = await ethers.getContractFactory('CronUpkeep', { - libraries: { Cron: cronExternalLib.address }, - }) - assert( - await cronUpkeepFactory.attach(upkeepAddress).owner(), - owner.address, - ) - }) - }) - - describe('setMaxJobs()', () => { - it('sets the max jobs value', async () => { - expect(await factory.s_maxJobs()).to.equal(5) - await factory.setMaxJobs(6) - expect(await factory.s_maxJobs()).to.equal(6) - }) - - it('is only callable by the owner', async () => { - await expect(factory.connect(stranger).setMaxJobs(6)).to.be.revertedWith( - OWNABLE_ERR, - ) - }) - }) -}) diff --git a/contracts/test/v0.8/automation/ERC20BalanceMonitor.test.ts b/contracts/test/v0.8/automation/ERC20BalanceMonitor.test.ts deleted file mode 100644 index 2d5d113abca..00000000000 --- a/contracts/test/v0.8/automation/ERC20BalanceMonitor.test.ts +++ /dev/null @@ -1,695 +0,0 @@ -import { ethers } from 'hardhat' -import { assert, expect } from 'chai' -import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' -import { ReceiveEmitter } from '../../../typechain/ReceiveEmitter' -import { ReceiveFallbackEmitter } from '../../../typechain/ReceiveFallbackEmitter' -import * as h from '../../test-helpers/helpers' -import { ERC20BalanceMonitorExposed, LinkToken } from '../../../typechain' -import { BigNumber } from 'ethers' - -const OWNABLE_ERR = 'Only callable by owner' -const 
INVALID_WATCHLIST_ERR = `InvalidWatchList` -const PAUSED_ERR = 'Pausable: paused' -const ONLY_KEEPER_ERR = `OnlyKeeperRegistry` - -const zeroLINK = ethers.utils.parseEther('0') -const oneLINK = ethers.utils.parseEther('1') -const twoLINK = ethers.utils.parseEther('2') -const threeLINK = ethers.utils.parseEther('3') -const fiveLINK = ethers.utils.parseEther('5') -const sixLINK = ethers.utils.parseEther('6') -const tenLINK = ethers.utils.parseEther('10') - -const oneHundredLINK = ethers.utils.parseEther('100') - -const watchAddress1 = ethers.Wallet.createRandom().address -const watchAddress2 = ethers.Wallet.createRandom().address -const watchAddress3 = ethers.Wallet.createRandom().address -const watchAddress4 = ethers.Wallet.createRandom().address -let watchAddress5: string -let watchAddress6: string - -let bm: ERC20BalanceMonitorExposed -let lt: LinkToken -let receiveEmitter: ReceiveEmitter -let receiveFallbackEmitter: ReceiveFallbackEmitter -let owner: SignerWithAddress -let stranger: SignerWithAddress -let keeperRegistry: SignerWithAddress - -async function assertWatchlistBalances( - balance1: BigNumber, - balance2: BigNumber, - balance3: BigNumber, - balance4: BigNumber, - balance5: BigNumber, - balance6: BigNumber, -) { - await h.assertLinkTokenBalance(lt, watchAddress1, balance1, 'address 1') - await h.assertLinkTokenBalance(lt, watchAddress2, balance2, 'address 2') - await h.assertLinkTokenBalance(lt, watchAddress3, balance3, 'address 3') - await h.assertLinkTokenBalance(lt, watchAddress4, balance4, 'address 4') - await h.assertLinkTokenBalance(lt, watchAddress5, balance5, 'address 5') - await h.assertLinkTokenBalance(lt, watchAddress6, balance6, 'address 6') -} - -describe('ERC20BalanceMonitor', () => { - beforeEach(async () => { - const accounts = await ethers.getSigners() - owner = accounts[0] - stranger = accounts[1] - keeperRegistry = accounts[2] - watchAddress5 = accounts[3].address - watchAddress6 = accounts[4].address - - const bmFactory = await ethers.getContractFactory( - 'ERC20BalanceMonitorExposed', - owner, - ) - const ltFactory = await ethers.getContractFactory( - 'src/v0.8/shared/test/helpers/LinkTokenTestHelper.sol:LinkTokenTestHelper', - owner, - ) - const reFactory = await ethers.getContractFactory('ReceiveEmitter', owner) - const rfeFactory = await ethers.getContractFactory( - 'ReceiveFallbackEmitter', - owner, - ) - - lt = await ltFactory.deploy() - bm = await bmFactory.deploy(lt.address, keeperRegistry.address, 0) - - for (let i = 1; i <= 4; i++) { - const recipient = await accounts[i].getAddress() - await lt.connect(owner).transfer(recipient, oneHundredLINK) - } - - receiveEmitter = await reFactory.deploy() - receiveFallbackEmitter = await rfeFactory.deploy() - await Promise.all([ - bm.deployed(), - receiveEmitter.deployed(), - receiveFallbackEmitter.deployed(), - ]) - }) - - afterEach(async () => { - await h.reset() - }) - - describe('add funds', () => { - it('Should allow anyone to add funds', async () => { - await lt.transfer(bm.address, oneLINK) - await lt.connect(stranger).transfer(bm.address, oneLINK) - }) - }) - - describe('withdraw()', () => { - beforeEach(async () => { - const tx = await lt.connect(owner).transfer(bm.address, oneLINK) - await tx.wait() - }) - - it('Should allow the owner to withdraw', async () => { - const beforeBalance = await lt.balanceOf(owner.address) - const tx = await bm.connect(owner).withdraw(oneLINK, owner.address) - await tx.wait() - const afterBalance = await lt.balanceOf(owner.address) - assert.isTrue( - 
afterBalance.gt(beforeBalance), - 'balance did not increase after withdraw', - ) - }) - - it('Should emit an event', async () => { - const tx = await bm.connect(owner).withdraw(oneLINK, owner.address) - await expect(tx) - .to.emit(bm, 'FundsWithdrawn') - .withArgs(oneLINK, owner.address) - }) - - it('Should allow the owner to withdraw to anyone', async () => { - const beforeBalance = await lt.balanceOf(stranger.address) - const tx = await bm.connect(owner).withdraw(oneLINK, stranger.address) - await tx.wait() - const afterBalance = await lt.balanceOf(stranger.address) - assert.isTrue( - beforeBalance.add(oneLINK).eq(afterBalance), - 'balance did not increase after withdraw', - ) - }) - - it('Should not allow strangers to withdraw', async () => { - const tx = bm.connect(stranger).withdraw(oneLINK, owner.address) - await expect(tx).to.be.revertedWith(OWNABLE_ERR) - }) - }) - - describe('pause() / unpause()', () => { - it('Should allow owner to pause / unpause', async () => { - const pauseTx = await bm.connect(owner).pause() - await pauseTx.wait() - const unpauseTx = await bm.connect(owner).unpause() - await unpauseTx.wait() - }) - - it('Should not allow strangers to pause / unpause', async () => { - const pauseTxStranger = bm.connect(stranger).pause() - await expect(pauseTxStranger).to.be.revertedWith(OWNABLE_ERR) - const pauseTxOwner = await bm.connect(owner).pause() - await pauseTxOwner.wait() - const unpauseTxStranger = bm.connect(stranger).unpause() - await expect(unpauseTxStranger).to.be.revertedWith(OWNABLE_ERR) - }) - }) - - describe('setWatchList() / getWatchList() / getAccountInfo()', () => { - it('Should allow owner to set the watchlist', async () => { - // should start unactive - assert.isFalse((await bm.getAccountInfo(watchAddress1)).isActive) - // add first watchlist - let setTx = await bm - .connect(owner) - .setWatchList([watchAddress1], [oneLINK], [twoLINK]) - await setTx.wait() - let watchList = await bm.getWatchList() - assert.deepEqual(watchList, [watchAddress1]) - const accountInfo = await bm.getAccountInfo(watchAddress1) - assert.isTrue(accountInfo.isActive) - expect(accountInfo.minBalance).to.equal(oneLINK) - expect(accountInfo.topUpLevel).to.equal(twoLINK) - // add more to watchlist - setTx = await bm - .connect(owner) - .setWatchList( - [watchAddress1, watchAddress2, watchAddress3], - [oneLINK, twoLINK, threeLINK], - [twoLINK, threeLINK, fiveLINK], - ) - await setTx.wait() - watchList = await bm.getWatchList() - assert.deepEqual(watchList, [watchAddress1, watchAddress2, watchAddress3]) - let accountInfo1 = await bm.getAccountInfo(watchAddress1) - let accountInfo2 = await bm.getAccountInfo(watchAddress2) - let accountInfo3 = await bm.getAccountInfo(watchAddress3) - expect(accountInfo1.isActive).to.be.true - expect(accountInfo1.minBalance).to.equal(oneLINK) - expect(accountInfo1.topUpLevel).to.equal(twoLINK) - expect(accountInfo2.isActive).to.be.true - expect(accountInfo2.minBalance).to.equal(twoLINK) - expect(accountInfo2.topUpLevel).to.equal(threeLINK) - expect(accountInfo3.isActive).to.be.true - expect(accountInfo3.minBalance).to.equal(threeLINK) - expect(accountInfo3.topUpLevel).to.equal(fiveLINK) - // remove some from watchlist - setTx = await bm - .connect(owner) - .setWatchList( - [watchAddress3, watchAddress1], - [threeLINK, oneLINK], - [fiveLINK, twoLINK], - ) - await setTx.wait() - watchList = await bm.getWatchList() - assert.deepEqual(watchList, [watchAddress3, watchAddress1]) - accountInfo1 = await bm.getAccountInfo(watchAddress1) - accountInfo2 = await 
bm.getAccountInfo(watchAddress2) - accountInfo3 = await bm.getAccountInfo(watchAddress3) - expect(accountInfo1.isActive).to.be.true - expect(accountInfo2.isActive).to.be.false - expect(accountInfo3.isActive).to.be.true - }) - - it('Should not allow duplicates in the watchlist', async () => { - const errMsg = `DuplicateAddress` - const setTx = bm - .connect(owner) - .setWatchList( - [watchAddress1, watchAddress2, watchAddress1], - [oneLINK, twoLINK, threeLINK], - [twoLINK, threeLINK, fiveLINK], - ) - await expect(setTx) - .to.be.revertedWithCustomError(bm, errMsg) - .withArgs(watchAddress1) - }) - - it('Should not allow a topUpLevel les than or equal to minBalance in the watchlist', async () => { - const setTx = bm - .connect(owner) - .setWatchList( - [watchAddress1, watchAddress2, watchAddress1], - [oneLINK, twoLINK, threeLINK], - [zeroLINK, twoLINK, threeLINK], - ) - await expect(setTx).to.be.revertedWithCustomError( - bm, - INVALID_WATCHLIST_ERR, - ) - }) - - it('Should not allow larger than maximum watchlist size', async () => { - const watchlist: any[][] = [[], [], []] - Array.from(Array(301).keys()).forEach(() => { - watchlist[0].push(owner.address) - watchlist[1].push(oneLINK) - watchlist[2].push(twoLINK) - }) - const tx = bm - .connect(owner) - .setWatchList(watchlist[0], watchlist[1], watchlist[2]) - await expect(tx).to.be.revertedWithCustomError(bm, INVALID_WATCHLIST_ERR) - }) - - it('Should not allow strangers to set the watchlist', async () => { - const setTxStranger = bm - .connect(stranger) - .setWatchList([watchAddress1], [oneLINK], [twoLINK]) - await expect(setTxStranger).to.be.revertedWith(OWNABLE_ERR) - }) - - it('Should revert if the list lengths differ', async () => { - let tx = bm.connect(owner).setWatchList([watchAddress1], [], [twoLINK]) - await expect(tx).to.be.revertedWithCustomError(bm, INVALID_WATCHLIST_ERR) - tx = bm.connect(owner).setWatchList([watchAddress1], [oneLINK], []) - await expect(tx).to.be.revertedWithCustomError(bm, INVALID_WATCHLIST_ERR) - tx = bm.connect(owner).setWatchList([], [oneLINK], [twoLINK]) - await expect(tx).to.be.revertedWithCustomError(bm, INVALID_WATCHLIST_ERR) - }) - - it('Should revert if any of the addresses are empty', async () => { - let tx = bm - .connect(owner) - .setWatchList( - [watchAddress1, ethers.constants.AddressZero], - [oneLINK, oneLINK], - [twoLINK, twoLINK], - ) - await expect(tx).to.be.revertedWithCustomError(bm, INVALID_WATCHLIST_ERR) - }) - - it('Should revert if any of the top up amounts are 0', async () => { - const tx = bm - .connect(owner) - .setWatchList( - [watchAddress1, watchAddress2], - [oneLINK, oneLINK], - [twoLINK, zeroLINK], - ) - await expect(tx).to.be.revertedWithCustomError(bm, INVALID_WATCHLIST_ERR) - }) - }) - - describe('getKeeperRegistryAddress() / setKeeperRegistryAddress()', () => { - const newAddress = ethers.Wallet.createRandom().address - - it('Should initialize with the registry address provided to the constructor', async () => { - const address = await bm.getKeeperRegistryAddress() - assert.equal(address, keeperRegistry.address) - }) - - it('Should allow the owner to set the registry address', async () => { - const setTx = await bm.connect(owner).setKeeperRegistryAddress(newAddress) - await setTx.wait() - const address = await bm.getKeeperRegistryAddress() - assert.equal(address, newAddress) - }) - - it('Should not allow strangers to set the registry address', async () => { - const setTx = bm.connect(stranger).setKeeperRegistryAddress(newAddress) - await 
expect(setTx).to.be.revertedWith(OWNABLE_ERR) - }) - - it('Should emit an event', async () => { - const setTx = await bm.connect(owner).setKeeperRegistryAddress(newAddress) - await expect(setTx) - .to.emit(bm, 'KeeperRegistryAddressUpdated') - .withArgs(keeperRegistry.address, newAddress) - }) - }) - - describe('getMinWaitPeriodSeconds / setMinWaitPeriodSeconds()', () => { - const newWaitPeriod = BigNumber.from(1) - - it('Should initialize with the wait period provided to the constructor', async () => { - const minWaitPeriod = await bm.getMinWaitPeriodSeconds() - expect(minWaitPeriod).to.equal(0) - }) - - it('Should allow owner to set the wait period', async () => { - const setTx = await bm - .connect(owner) - .setMinWaitPeriodSeconds(newWaitPeriod) - await setTx.wait() - const minWaitPeriod = await bm.getMinWaitPeriodSeconds() - expect(minWaitPeriod).to.equal(newWaitPeriod) - }) - - it('Should not allow strangers to set the wait period', async () => { - const setTx = bm.connect(stranger).setMinWaitPeriodSeconds(newWaitPeriod) - await expect(setTx).to.be.revertedWith(OWNABLE_ERR) - }) - - it('Should emit an event', async () => { - const setTx = await bm - .connect(owner) - .setMinWaitPeriodSeconds(newWaitPeriod) - await expect(setTx) - .to.emit(bm, 'MinWaitPeriodUpdated') - .withArgs(0, newWaitPeriod) - }) - }) - - describe('checkUpkeep() / getUnderfundedAddresses()', () => { - beforeEach(async () => { - const setTx = await bm.connect(owner).setWatchList( - [ - watchAddress1, // needs funds - watchAddress5, // funded - watchAddress2, // needs funds - watchAddress6, // funded - watchAddress3, // needs funds - ], - new Array(5).fill(oneLINK), - new Array(5).fill(twoLINK), - ) - await setTx.wait() - }) - - it('Should return list of address that are underfunded', async () => { - const fundTx = await lt.connect(owner).transfer( - bm.address, - sixLINK, // needs 6 total - ) - await fundTx.wait() - const [should, payload] = await bm.checkUpkeep('0x') - assert.isTrue(should) - let [addresses] = ethers.utils.defaultAbiCoder.decode( - ['address[]'], - payload, - ) - assert.deepEqual(addresses, [watchAddress1, watchAddress2, watchAddress3]) - // checkUpkeep payload should match getUnderfundedAddresses() - addresses = await bm.getUnderfundedAddresses() - assert.deepEqual(addresses, [watchAddress1, watchAddress2, watchAddress3]) - }) - - it('Should return some results even if contract cannot fund all eligible targets', async () => { - const fundTx = await lt.connect(owner).transfer( - bm.address, - fiveLINK, // needs 6 total - ) - await fundTx.wait() - const [should, payload] = await bm.checkUpkeep('0x') - assert.isTrue(should) - const [addresses] = ethers.utils.defaultAbiCoder.decode( - ['address[]'], - payload, - ) - assert.deepEqual(addresses, [watchAddress1, watchAddress2]) - }) - - it('Should omit addresses that have been funded recently', async () => { - const setWaitPdTx = await bm.setMinWaitPeriodSeconds(3600) // 1 hour - const fundTx = await lt.connect(owner).transfer(bm.address, sixLINK) - await Promise.all([setWaitPdTx.wait(), fundTx.wait()]) - const block = await ethers.provider.getBlock('latest') - const setTopUpTx = await bm.setLastTopUpXXXTestOnly( - watchAddress2, - block.timestamp - 100, - ) - await setTopUpTx.wait() - const [should, payload] = await bm.checkUpkeep('0x') - assert.isTrue(should) - const [addresses] = ethers.utils.defaultAbiCoder.decode( - ['address[]'], - payload, - ) - assert.deepEqual(addresses, [watchAddress1, watchAddress3]) - }) - - it('Should revert when paused', 
async () => { - const tx = await bm.connect(owner).pause() - await tx.wait() - const ethCall = bm.checkUpkeep('0x') - await expect(ethCall).to.be.revertedWith(PAUSED_ERR) - }) - }) - - describe('performUpkeep()', () => { - let validPayload: string - let invalidPayload: string - - beforeEach(async () => { - validPayload = ethers.utils.defaultAbiCoder.encode( - ['address[]'], - [[watchAddress1, watchAddress2, watchAddress3]], - ) - invalidPayload = ethers.utils.defaultAbiCoder.encode( - ['address[]'], - [[watchAddress1, watchAddress2, watchAddress4, watchAddress5]], - ) - const setTx = await bm.connect(owner).setWatchList( - [ - watchAddress1, // needs funds - watchAddress5, // funded - watchAddress2, // needs funds - watchAddress6, // funded - watchAddress3, // needs funds - // watchAddress4 - omitted - ], - new Array(5).fill(oneLINK), - new Array(5).fill(twoLINK), - ) - await setTx.wait() - }) - - it('Should revert when paused', async () => { - const pauseTx = await bm.connect(owner).pause() - await pauseTx.wait() - const performTx = bm.connect(keeperRegistry).performUpkeep(validPayload) - await expect(performTx).to.be.revertedWith(PAUSED_ERR) - }) - - context('when partially funded', () => { - it('Should fund as many addresses as possible', async () => { - const fundTx = await lt.connect(owner).transfer( - bm.address, - fiveLINK, // only enough LINK to fund 2 addresses - ) - await fundTx.wait() - await assertWatchlistBalances( - zeroLINK, - zeroLINK, - zeroLINK, - zeroLINK, - oneHundredLINK, - oneHundredLINK, - ) - const performTx = await bm - .connect(keeperRegistry) - .performUpkeep(validPayload) - await assertWatchlistBalances( - twoLINK, - twoLINK, - zeroLINK, - zeroLINK, - oneHundredLINK, - oneHundredLINK, - ) - await expect(performTx) - .to.emit(bm, 'TopUpSucceeded') - .withArgs(watchAddress1) - await expect(performTx) - .to.emit(bm, 'TopUpSucceeded') - .withArgs(watchAddress2) - }) - }) - - context('when fully funded', () => { - beforeEach(async () => { - const fundTx = await lt.connect(owner).transfer(bm.address, tenLINK) - await fundTx.wait() - }) - - it('Should fund the appropriate addresses', async () => { - await assertWatchlistBalances( - zeroLINK, - zeroLINK, - zeroLINK, - zeroLINK, - oneHundredLINK, - oneHundredLINK, - ) - const performTx = await bm - .connect(keeperRegistry) - .performUpkeep(validPayload, { gasLimit: 2_500_000 }) - await performTx.wait() - await assertWatchlistBalances( - twoLINK, - twoLINK, - twoLINK, - zeroLINK, - oneHundredLINK, - oneHundredLINK, - ) - }) - - it('Should only fund active, underfunded addresses', async () => { - await assertWatchlistBalances( - zeroLINK, - zeroLINK, - zeroLINK, - zeroLINK, - oneHundredLINK, - oneHundredLINK, - ) - const performTx = await bm - .connect(keeperRegistry) - .performUpkeep(invalidPayload, { gasLimit: 2_500_000 }) - await performTx.wait() - await assertWatchlistBalances( - twoLINK, - twoLINK, - zeroLINK, - zeroLINK, - oneHundredLINK, - oneHundredLINK, - ) - }) - - it('Should not fund addresses that have been funded recently', async () => { - const setWaitPdTx = await bm.setMinWaitPeriodSeconds(3600) // 1 hour - await setWaitPdTx.wait() - const block = await ethers.provider.getBlock('latest') - const setTopUpTx = await bm.setLastTopUpXXXTestOnly( - watchAddress2, - block.timestamp - 100, - ) - await setTopUpTx.wait() - await assertWatchlistBalances( - zeroLINK, - zeroLINK, - zeroLINK, - zeroLINK, - oneHundredLINK, - oneHundredLINK, - ) - const performTx = await bm - .connect(keeperRegistry) - 
.performUpkeep(validPayload, { gasLimit: 2_500_000 }) - await performTx.wait() - await assertWatchlistBalances( - twoLINK, - zeroLINK, - twoLINK, - zeroLINK, - oneHundredLINK, - oneHundredLINK, - ) - }) - - it('Should only be callable by the keeper registry contract', async () => { - let performTx = bm.connect(owner).performUpkeep(validPayload) - await expect(performTx).to.be.revertedWithCustomError( - bm, - ONLY_KEEPER_ERR, - ) - performTx = bm.connect(stranger).performUpkeep(validPayload) - await expect(performTx).to.be.revertedWithCustomError( - bm, - ONLY_KEEPER_ERR, - ) - }) - - it('Should protect against running out of gas', async () => { - await assertWatchlistBalances( - zeroLINK, - zeroLINK, - zeroLINK, - zeroLINK, - oneHundredLINK, - oneHundredLINK, - ) - const performTx = await bm - .connect(keeperRegistry) - .performUpkeep(validPayload, { gasLimit: 130_000 }) // too little for all 3 transfers - await performTx.wait() - const balance1 = await lt.balanceOf(watchAddress1) - const balance2 = await lt.balanceOf(watchAddress2) - const balance3 = await lt.balanceOf(watchAddress3) - const balances = [balance1, balance2, balance3].map((n) => n.toString()) - expect(balances) - .to.include(twoLINK.toString()) // expect at least 1 transfer - .to.include(zeroLINK.toString()) // expect at least 1 out of funds - }) - - it('Should provide enough gas to support receive and fallback functions', async () => { - const addresses = [ - receiveEmitter.address, - receiveFallbackEmitter.address, - ] - const payload = ethers.utils.defaultAbiCoder.encode( - ['address[]'], - [addresses], - ) - const setTx = await bm - .connect(owner) - .setWatchList( - addresses, - new Array(2).fill(oneLINK), - new Array(2).fill(twoLINK), - ) - await setTx.wait() - - const reBalanceBefore = await lt.balanceOf(receiveEmitter.address) - const rfeBalanceBefore = await lt.balanceOf( - receiveFallbackEmitter.address, - ) - const performTx = await bm - .connect(keeperRegistry) - .performUpkeep(payload, { gasLimit: 2_500_000 }) - await h.assertLinkTokenBalance( - lt, - receiveEmitter.address, - reBalanceBefore.add(twoLINK), - ) - await h.assertLinkTokenBalance( - lt, - receiveFallbackEmitter.address, - rfeBalanceBefore.add(twoLINK), - ) - - await expect(performTx) - .to.emit(bm, 'TopUpSucceeded') - .withArgs(receiveEmitter.address) - await expect(performTx) - .to.emit(bm, 'TopUpSucceeded') - .withArgs(receiveFallbackEmitter.address) - }) - }) - }) - - describe('topUp()', () => { - context('when not paused', () => { - it('Should be callable by anyone', async () => { - const users = [owner, keeperRegistry, stranger] - for (let idx = 0; idx < users.length; idx++) { - const user = users[idx] - await bm.connect(user).topUp([]) - } - }) - }) - context('when paused', () => { - it('Should be callable by no one', async () => { - await bm.connect(owner).pause() - const users = [owner, keeperRegistry, stranger] - for (let idx = 0; idx < users.length; idx++) { - const user = users[idx] - const tx = bm.connect(user).topUp([]) - await expect(tx).to.be.revertedWith(PAUSED_ERR) - } - }) - }) - }) -}) diff --git a/contracts/test/v0.8/automation/EthBalanceMonitor.test.ts b/contracts/test/v0.8/automation/EthBalanceMonitor.test.ts deleted file mode 100644 index edcf1b564c9..00000000000 --- a/contracts/test/v0.8/automation/EthBalanceMonitor.test.ts +++ /dev/null @@ -1,663 +0,0 @@ -import { ethers } from 'hardhat' -import { assert, expect } from 'chai' -import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' -import { 
EthBalanceMonitorExposed } from '../../../typechain/EthBalanceMonitorExposed' -import { ReceiveReverter } from '../../../typechain/ReceiveReverter' -import { ReceiveEmitter } from '../../../typechain/ReceiveEmitter' -import { ReceiveFallbackEmitter } from '../../../typechain/ReceiveFallbackEmitter' -import { BigNumber } from 'ethers' -import * as h from '../../test-helpers/helpers' - -const OWNABLE_ERR = 'Only callable by owner' -const INVALID_WATCHLIST_ERR = `InvalidWatchList` -const PAUSED_ERR = 'Pausable: paused' -const ONLY_KEEPER_ERR = `OnlyKeeperRegistry` - -const zeroEth = ethers.utils.parseEther('0') -const oneEth = ethers.utils.parseEther('1') -const twoEth = ethers.utils.parseEther('2') -const threeEth = ethers.utils.parseEther('3') -const fiveEth = ethers.utils.parseEther('5') -const sixEth = ethers.utils.parseEther('6') -const tenEth = ethers.utils.parseEther('10') - -const watchAddress1 = ethers.Wallet.createRandom().address -const watchAddress2 = ethers.Wallet.createRandom().address -const watchAddress3 = ethers.Wallet.createRandom().address -const watchAddress4 = ethers.Wallet.createRandom().address -let watchAddress5: string -let watchAddress6: string - -async function assertWatchlistBalances( - balance1: number, - balance2: number, - balance3: number, - balance4: number, - balance5: number, - balance6: number, -) { - const toEth = (n: number) => ethers.utils.parseUnits(n.toString(), 'ether') - await h.assertBalance(watchAddress1, toEth(balance1), 'address 1') - await h.assertBalance(watchAddress2, toEth(balance2), 'address 2') - await h.assertBalance(watchAddress3, toEth(balance3), 'address 3') - await h.assertBalance(watchAddress4, toEth(balance4), 'address 4') - await h.assertBalance(watchAddress5, toEth(balance5), 'address 5') - await h.assertBalance(watchAddress6, toEth(balance6), 'address 6') -} - -let bm: EthBalanceMonitorExposed -let receiveReverter: ReceiveReverter -let receiveEmitter: ReceiveEmitter -let receiveFallbackEmitter: ReceiveFallbackEmitter -let owner: SignerWithAddress -let stranger: SignerWithAddress -let keeperRegistry: SignerWithAddress - -describe('EthBalanceMonitor', () => { - beforeEach(async () => { - const accounts = await ethers.getSigners() - owner = accounts[0] - stranger = accounts[1] - keeperRegistry = accounts[2] - watchAddress5 = accounts[3].address - watchAddress6 = accounts[4].address - - const bmFactory = await ethers.getContractFactory( - 'EthBalanceMonitorExposed', - owner, - ) - const rrFactory = await ethers.getContractFactory('ReceiveReverter', owner) - const reFactory = await ethers.getContractFactory('ReceiveEmitter', owner) - const rfeFactory = await ethers.getContractFactory( - 'ReceiveFallbackEmitter', - owner, - ) - - bm = await bmFactory.deploy(keeperRegistry.address, 0) - receiveReverter = await rrFactory.deploy() - receiveEmitter = await reFactory.deploy() - receiveFallbackEmitter = await rfeFactory.deploy() - await Promise.all([ - bm.deployed(), - receiveReverter.deployed(), - receiveEmitter.deployed(), - receiveFallbackEmitter.deployed(), - ]) - }) - - afterEach(async () => { - await h.reset() - }) - - describe('receive()', () => { - it('Should allow anyone to add funds', async () => { - await owner.sendTransaction({ - to: bm.address, - value: oneEth, - }) - await stranger.sendTransaction({ - to: bm.address, - value: oneEth, - }) - }) - - it('Should emit an event', async () => { - await owner.sendTransaction({ - to: bm.address, - value: oneEth, - }) - const tx = stranger.sendTransaction({ - to: bm.address, - value: 
oneEth, - }) - await expect(tx) - .to.emit(bm, 'FundsAdded') - .withArgs(oneEth, twoEth, stranger.address) - }) - }) - - describe('withdraw()', () => { - beforeEach(async () => { - const tx = await owner.sendTransaction({ - to: bm.address, - value: oneEth, - }) - await tx.wait() - }) - - it('Should allow the owner to withdraw', async () => { - const beforeBalance = await owner.getBalance() - const tx = await bm.connect(owner).withdraw(oneEth, owner.address) - await tx.wait() - const afterBalance = await owner.getBalance() - assert.isTrue( - afterBalance.gt(beforeBalance), - 'balance did not increase after withdraw', - ) - }) - - it('Should emit an event', async () => { - const tx = await bm.connect(owner).withdraw(oneEth, owner.address) - await expect(tx) - .to.emit(bm, 'FundsWithdrawn') - .withArgs(oneEth, owner.address) - }) - - it('Should allow the owner to withdraw to anyone', async () => { - const beforeBalance = await stranger.getBalance() - const tx = await bm.connect(owner).withdraw(oneEth, stranger.address) - await tx.wait() - const afterBalance = await stranger.getBalance() - assert.isTrue( - beforeBalance.add(oneEth).eq(afterBalance), - 'balance did not increase after withdraw', - ) - }) - - it('Should not allow strangers to withdraw', async () => { - const tx = bm.connect(stranger).withdraw(oneEth, owner.address) - await expect(tx).to.be.revertedWith(OWNABLE_ERR) - }) - }) - - describe('pause() / unpause()', () => { - it('Should allow owner to pause / unpause', async () => { - const pauseTx = await bm.connect(owner).pause() - await pauseTx.wait() - const unpauseTx = await bm.connect(owner).unpause() - await unpauseTx.wait() - }) - - it('Should not allow strangers to pause / unpause', async () => { - const pauseTxStranger = bm.connect(stranger).pause() - await expect(pauseTxStranger).to.be.revertedWith(OWNABLE_ERR) - const pauseTxOwner = await bm.connect(owner).pause() - await pauseTxOwner.wait() - const unpauseTxStranger = bm.connect(stranger).unpause() - await expect(unpauseTxStranger).to.be.revertedWith(OWNABLE_ERR) - }) - }) - - describe('setWatchList() / getWatchList() / getAccountInfo()', () => { - it('Should allow owner to set the watchlist', async () => { - // should start unactive - assert.isFalse((await bm.getAccountInfo(watchAddress1)).isActive) - // add first watchlist - let setTx = await bm - .connect(owner) - .setWatchList([watchAddress1], [oneEth], [twoEth]) - await setTx.wait() - let watchList = await bm.getWatchList() - assert.deepEqual(watchList, [watchAddress1]) - const accountInfo = await bm.getAccountInfo(watchAddress1) - assert.isTrue(accountInfo.isActive) - expect(accountInfo.minBalanceWei).to.equal(oneEth) - expect(accountInfo.topUpAmountWei).to.equal(twoEth) - // add more to watchlist - setTx = await bm - .connect(owner) - .setWatchList( - [watchAddress1, watchAddress2, watchAddress3], - [oneEth, twoEth, threeEth], - [oneEth, twoEth, threeEth], - ) - await setTx.wait() - watchList = await bm.getWatchList() - assert.deepEqual(watchList, [watchAddress1, watchAddress2, watchAddress3]) - let accountInfo1 = await bm.getAccountInfo(watchAddress1) - let accountInfo2 = await bm.getAccountInfo(watchAddress2) - let accountInfo3 = await bm.getAccountInfo(watchAddress3) - expect(accountInfo1.isActive).to.be.true - expect(accountInfo1.minBalanceWei).to.equal(oneEth) - expect(accountInfo1.topUpAmountWei).to.equal(oneEth) - expect(accountInfo2.isActive).to.be.true - expect(accountInfo2.minBalanceWei).to.equal(twoEth) - 
expect(accountInfo2.topUpAmountWei).to.equal(twoEth) - expect(accountInfo3.isActive).to.be.true - expect(accountInfo3.minBalanceWei).to.equal(threeEth) - expect(accountInfo3.topUpAmountWei).to.equal(threeEth) - // remove some from watchlist - setTx = await bm - .connect(owner) - .setWatchList( - [watchAddress3, watchAddress1], - [threeEth, oneEth], - [threeEth, oneEth], - ) - await setTx.wait() - watchList = await bm.getWatchList() - assert.deepEqual(watchList, [watchAddress3, watchAddress1]) - accountInfo1 = await bm.getAccountInfo(watchAddress1) - accountInfo2 = await bm.getAccountInfo(watchAddress2) - accountInfo3 = await bm.getAccountInfo(watchAddress3) - expect(accountInfo1.isActive).to.be.true - expect(accountInfo2.isActive).to.be.false - expect(accountInfo3.isActive).to.be.true - }) - - it('Should not allow duplicates in the watchlist', async () => { - const errMsg = `DuplicateAddress` - const setTx = bm - .connect(owner) - .setWatchList( - [watchAddress1, watchAddress2, watchAddress1], - [oneEth, twoEth, threeEth], - [oneEth, twoEth, threeEth], - ) - await expect(setTx) - .to.be.revertedWithCustomError(bm, errMsg) - .withArgs(watchAddress1) - }) - - it('Should not allow strangers to set the watchlist', async () => { - const setTxStranger = bm - .connect(stranger) - .setWatchList([watchAddress1], [oneEth], [twoEth]) - await expect(setTxStranger).to.be.revertedWith(OWNABLE_ERR) - }) - - it('Should revert if the list lengths differ', async () => { - let tx = bm.connect(owner).setWatchList([watchAddress1], [], [twoEth]) - await expect(tx).to.be.revertedWithCustomError(bm, INVALID_WATCHLIST_ERR) - tx = bm.connect(owner).setWatchList([watchAddress1], [oneEth], []) - await expect(tx).to.be.revertedWithCustomError(bm, INVALID_WATCHLIST_ERR) - tx = bm.connect(owner).setWatchList([], [oneEth], [twoEth]) - await expect(tx).to.be.revertedWithCustomError(bm, INVALID_WATCHLIST_ERR) - }) - - it('Should revert if any of the addresses are empty', async () => { - let tx = bm - .connect(owner) - .setWatchList( - [watchAddress1, ethers.constants.AddressZero], - [oneEth, oneEth], - [twoEth, twoEth], - ) - await expect(tx).to.be.revertedWithCustomError(bm, INVALID_WATCHLIST_ERR) - }) - - it('Should revert if any of the top up amounts are 0', async () => { - const tx = bm - .connect(owner) - .setWatchList( - [watchAddress1, watchAddress2], - [oneEth, oneEth], - [twoEth, zeroEth], - ) - await expect(tx).to.be.revertedWithCustomError(bm, INVALID_WATCHLIST_ERR) - }) - }) - - describe('getKeeperRegistryAddress() / setKeeperRegistryAddress()', () => { - const newAddress = ethers.Wallet.createRandom().address - - it('Should initialize with the registry address provided to the constructor', async () => { - const address = await bm.getKeeperRegistryAddress() - assert.equal(address, keeperRegistry.address) - }) - - it('Should allow the owner to set the registry address', async () => { - const setTx = await bm.connect(owner).setKeeperRegistryAddress(newAddress) - await setTx.wait() - const address = await bm.getKeeperRegistryAddress() - assert.equal(address, newAddress) - }) - - it('Should not allow strangers to set the registry address', async () => { - const setTx = bm.connect(stranger).setKeeperRegistryAddress(newAddress) - await expect(setTx).to.be.revertedWith(OWNABLE_ERR) - }) - - it('Should emit an event', async () => { - const setTx = await bm.connect(owner).setKeeperRegistryAddress(newAddress) - await expect(setTx) - .to.emit(bm, 'KeeperRegistryAddressUpdated') - .withArgs(keeperRegistry.address, 
newAddress) - }) - }) - - describe('getMinWaitPeriodSeconds / setMinWaitPeriodSeconds()', () => { - const newWaitPeriod = BigNumber.from(1) - - it('Should initialize with the wait period provided to the constructor', async () => { - const minWaitPeriod = await bm.getMinWaitPeriodSeconds() - expect(minWaitPeriod).to.equal(0) - }) - - it('Should allow owner to set the wait period', async () => { - const setTx = await bm - .connect(owner) - .setMinWaitPeriodSeconds(newWaitPeriod) - await setTx.wait() - const minWaitPeriod = await bm.getMinWaitPeriodSeconds() - expect(minWaitPeriod).to.equal(newWaitPeriod) - }) - - it('Should not allow strangers to set the wait period', async () => { - const setTx = bm.connect(stranger).setMinWaitPeriodSeconds(newWaitPeriod) - await expect(setTx).to.be.revertedWith(OWNABLE_ERR) - }) - - it('Should emit an event', async () => { - const setTx = await bm - .connect(owner) - .setMinWaitPeriodSeconds(newWaitPeriod) - await expect(setTx) - .to.emit(bm, 'MinWaitPeriodUpdated') - .withArgs(0, newWaitPeriod) - }) - }) - - describe('checkUpkeep() / getUnderfundedAddresses()', () => { - beforeEach(async () => { - const setTx = await bm.connect(owner).setWatchList( - [ - watchAddress1, // needs funds - watchAddress5, // funded - watchAddress2, // needs funds - watchAddress6, // funded - watchAddress3, // needs funds - ], - new Array(5).fill(oneEth), - new Array(5).fill(twoEth), - ) - await setTx.wait() - }) - - it('Should return list of address that are underfunded', async () => { - const fundTx = await owner.sendTransaction({ - to: bm.address, - value: sixEth, // needs 6 total - }) - await fundTx.wait() - const [should, payload] = await bm.checkUpkeep('0x') - assert.isTrue(should) - let [addresses] = ethers.utils.defaultAbiCoder.decode( - ['address[]'], - payload, - ) - assert.deepEqual(addresses, [watchAddress1, watchAddress2, watchAddress3]) - // checkUpkeep payload should match getUnderfundedAddresses() - addresses = await bm.getUnderfundedAddresses() - assert.deepEqual(addresses, [watchAddress1, watchAddress2, watchAddress3]) - }) - - it('Should return some results even if contract cannot fund all eligible targets', async () => { - const fundTx = await owner.sendTransaction({ - to: bm.address, - value: fiveEth, // needs 6 total - }) - await fundTx.wait() - const [should, payload] = await bm.checkUpkeep('0x') - assert.isTrue(should) - const [addresses] = ethers.utils.defaultAbiCoder.decode( - ['address[]'], - payload, - ) - assert.deepEqual(addresses, [watchAddress1, watchAddress2]) - }) - - it('Should omit addresses that have been funded recently', async () => { - const setWaitPdTx = await bm.setMinWaitPeriodSeconds(3600) // 1 hour - const fundTx = await owner.sendTransaction({ - to: bm.address, - value: sixEth, - }) - await Promise.all([setWaitPdTx.wait(), fundTx.wait()]) - const block = await ethers.provider.getBlock('latest') - const setTopUpTx = await bm.setLastTopUpXXXTestOnly( - watchAddress2, - block.timestamp - 100, - ) - await setTopUpTx.wait() - const [should, payload] = await bm.checkUpkeep('0x') - assert.isTrue(should) - const [addresses] = ethers.utils.defaultAbiCoder.decode( - ['address[]'], - payload, - ) - assert.deepEqual(addresses, [watchAddress1, watchAddress3]) - }) - - it('Should revert when paused', async () => { - const tx = await bm.connect(owner).pause() - await tx.wait() - const ethCall = bm.checkUpkeep('0x') - await expect(ethCall).to.be.revertedWith(PAUSED_ERR) - }) - }) - - describe('performUpkeep()', () => { - let validPayload: string - 
let invalidPayload: string - - beforeEach(async () => { - validPayload = ethers.utils.defaultAbiCoder.encode( - ['address[]'], - [[watchAddress1, watchAddress2, watchAddress3]], - ) - invalidPayload = ethers.utils.defaultAbiCoder.encode( - ['address[]'], - [[watchAddress1, watchAddress2, watchAddress4, watchAddress5]], - ) - const setTx = await bm.connect(owner).setWatchList( - [ - watchAddress1, // needs funds - watchAddress5, // funded - watchAddress2, // needs funds - watchAddress6, // funded - watchAddress3, // needs funds - // watchAddress4 - omitted - ], - new Array(5).fill(oneEth), - new Array(5).fill(twoEth), - ) - await setTx.wait() - }) - - it('Should revert when paused', async () => { - const pauseTx = await bm.connect(owner).pause() - await pauseTx.wait() - const performTx = bm.connect(keeperRegistry).performUpkeep(validPayload) - await expect(performTx).to.be.revertedWith(PAUSED_ERR) - }) - - context('when partially funded', () => { - it('Should fund as many addresses as possible', async () => { - const fundTx = await owner.sendTransaction({ - to: bm.address, - value: fiveEth, // only enough eth to fund 2 addresses - }) - await fundTx.wait() - await assertWatchlistBalances(0, 0, 0, 0, 10_000, 10_000) - const performTx = await bm - .connect(keeperRegistry) - .performUpkeep(validPayload) - await assertWatchlistBalances(2, 2, 0, 0, 10_000, 10_000) - await expect(performTx) - .to.emit(bm, 'TopUpSucceeded') - .withArgs(watchAddress1) - await expect(performTx) - .to.emit(bm, 'TopUpSucceeded') - .withArgs(watchAddress2) - }) - }) - - context('when fully funded', () => { - beforeEach(async () => { - const fundTx = await owner.sendTransaction({ - to: bm.address, - value: tenEth, - }) - await fundTx.wait() - }) - - it('Should fund the appropriate addresses', async () => { - await assertWatchlistBalances(0, 0, 0, 0, 10_000, 10_000) - const performTx = await bm - .connect(keeperRegistry) - .performUpkeep(validPayload, { gasLimit: 2_500_000 }) - await performTx.wait() - await assertWatchlistBalances(2, 2, 2, 0, 10_000, 10_000) - }) - - it('Should only fund active, underfunded addresses', async () => { - await assertWatchlistBalances(0, 0, 0, 0, 10_000, 10_000) - const performTx = await bm - .connect(keeperRegistry) - .performUpkeep(invalidPayload, { gasLimit: 2_500_000 }) - await performTx.wait() - await assertWatchlistBalances(2, 2, 0, 0, 10_000, 10_000) - }) - - it('Should continue funding addresses even if one reverts', async () => { - await assertWatchlistBalances(0, 0, 0, 0, 10_000, 10_000) - const addresses = [ - watchAddress1, - receiveReverter.address, - watchAddress2, - ] - const setTx = await bm - .connect(owner) - .setWatchList( - addresses, - new Array(3).fill(oneEth), - new Array(3).fill(twoEth), - ) - await setTx.wait() - const payload = ethers.utils.defaultAbiCoder.encode( - ['address[]'], - [addresses], - ) - const performTx = await bm - .connect(keeperRegistry) - .performUpkeep(payload, { gasLimit: 2_500_000 }) - await performTx.wait() - await assertWatchlistBalances(2, 2, 0, 0, 10_000, 10_000) - await h.assertBalance(receiveReverter.address, 0) - await expect(performTx) - .to.emit(bm, 'TopUpSucceeded') - .withArgs(watchAddress1) - await expect(performTx) - .to.emit(bm, 'TopUpSucceeded') - .withArgs(watchAddress2) - await expect(performTx) - .to.emit(bm, 'TopUpFailed') - .withArgs(receiveReverter.address) - }) - - it('Should not fund addresses that have been funded recently', async () => { - const setWaitPdTx = await bm.setMinWaitPeriodSeconds(3600) // 1 hour - await 
setWaitPdTx.wait() - const block = await ethers.provider.getBlock('latest') - const setTopUpTx = await bm.setLastTopUpXXXTestOnly( - watchAddress2, - block.timestamp - 100, - ) - await setTopUpTx.wait() - await assertWatchlistBalances(0, 0, 0, 0, 10_000, 10_000) - const performTx = await bm - .connect(keeperRegistry) - .performUpkeep(validPayload, { gasLimit: 2_500_000 }) - await performTx.wait() - await assertWatchlistBalances(2, 0, 2, 0, 10_000, 10_000) - }) - - it('Should only be callable by the keeper registry contract', async () => { - let performTx = bm.connect(owner).performUpkeep(validPayload) - await expect(performTx).to.be.revertedWithCustomError( - bm, - ONLY_KEEPER_ERR, - ) - performTx = bm.connect(stranger).performUpkeep(validPayload) - await expect(performTx).to.be.revertedWithCustomError( - bm, - ONLY_KEEPER_ERR, - ) - }) - - it('Should protect against running out of gas', async () => { - await assertWatchlistBalances(0, 0, 0, 0, 10_000, 10_000) - const performTx = await bm - .connect(keeperRegistry) - .performUpkeep(validPayload, { gasLimit: 130_000 }) // too little for all 3 transfers - await performTx.wait() - const balance1 = await ethers.provider.getBalance(watchAddress1) - const balance2 = await ethers.provider.getBalance(watchAddress2) - const balance3 = await ethers.provider.getBalance(watchAddress3) - const balances = [balance1, balance2, balance3].map((n) => n.toString()) - expect(balances) - .to.include(twoEth.toString()) // expect at least 1 transfer - .to.include(zeroEth.toString()) // expect at least 1 out of funds - }) - - it('Should provide enough gas to support receive and fallback functions', async () => { - const addresses = [ - receiveEmitter.address, - receiveFallbackEmitter.address, - ] - const payload = ethers.utils.defaultAbiCoder.encode( - ['address[]'], - [addresses], - ) - const setTx = await bm - .connect(owner) - .setWatchList( - addresses, - new Array(2).fill(oneEth), - new Array(2).fill(twoEth), - ) - await setTx.wait() - - const reBalanceBefore = await ethers.provider.getBalance( - receiveEmitter.address, - ) - const rfeBalanceBefore = await ethers.provider.getBalance( - receiveFallbackEmitter.address, - ) - - const performTx = await bm - .connect(keeperRegistry) - .performUpkeep(payload, { gasLimit: 2_500_000 }) - await h.assertBalance( - receiveEmitter.address, - reBalanceBefore.add(twoEth), - ) - await h.assertBalance( - receiveFallbackEmitter.address, - rfeBalanceBefore.add(twoEth), - ) - - await expect(performTx) - .to.emit(bm, 'TopUpSucceeded') - .withArgs(receiveEmitter.address) - await expect(performTx) - .to.emit(bm, 'TopUpSucceeded') - .withArgs(receiveFallbackEmitter.address) - }) - }) - }) - - describe('topUp()', () => { - context('when not paused', () => { - it('Should be callable by anyone', async () => { - const users = [owner, keeperRegistry, stranger] - for (let idx = 0; idx < users.length; idx++) { - const user = users[idx] - await bm.connect(user).topUp([]) - } - }) - }) - context('when paused', () => { - it('Should be callable by no one', async () => { - await bm.connect(owner).pause() - const users = [owner, keeperRegistry, stranger] - for (let idx = 0; idx < users.length; idx++) { - const user = users[idx] - const tx = bm.connect(user).topUp([]) - await expect(tx).to.be.revertedWith(PAUSED_ERR) - } - }) - }) - }) -}) diff --git a/contracts/test/v0.8/automation/IAutomationRegistryMaster2_2.test.ts b/contracts/test/v0.8/automation/IAutomationRegistryMaster2_2.test.ts deleted file mode 100644 index 11da7273ab9..00000000000 
--- a/contracts/test/v0.8/automation/IAutomationRegistryMaster2_2.test.ts +++ /dev/null @@ -1,117 +0,0 @@ -import fs from 'fs' -import { ethers } from 'hardhat' -import { assert } from 'chai' -import { AutomationRegistry2_2__factory as AutomationRegistryFactory } from '../../../typechain/factories/AutomationRegistry2_2__factory' -import { AutomationRegistryLogicA2_2__factory as AutomationRegistryLogicAFactory } from '../../../typechain/factories/AutomationRegistryLogicA2_2__factory' -import { AutomationRegistryLogicB2_2__factory as AutomationRegistryLogicBFactory } from '../../../typechain/factories/AutomationRegistryLogicB2_2__factory' -import { AutomationRegistryBase2_2__factory as AutomationRegistryBaseFactory } from '../../../typechain/factories/AutomationRegistryBase2_2__factory' -import { Chainable__factory as ChainableFactory } from '../../../typechain/factories/Chainable__factory' -import { IAutomationRegistryMaster__factory as IAutomationRegistryMasterFactory } from '../../../typechain/factories/IAutomationRegistryMaster__factory' -import { IAutomationRegistryConsumer__factory as IAutomationRegistryConsumerFactory } from '../../../typechain/factories/IAutomationRegistryConsumer__factory' -import { MigratableKeeperRegistryInterface__factory as MigratableKeeperRegistryInterfaceFactory } from '../../../typechain/factories/MigratableKeeperRegistryInterface__factory' -import { MigratableKeeperRegistryInterfaceV2__factory as MigratableKeeperRegistryInterfaceV2Factory } from '../../../typechain/factories/MigratableKeeperRegistryInterfaceV2__factory' -import { OCR2Abstract__factory as OCR2AbstractFactory } from '../../../typechain/factories/OCR2Abstract__factory' -import { IAutomationV21PlusCommon__factory as IAutomationV21PlusCommonFactory } from '../../../typechain/factories/IAutomationV21PlusCommon__factory' -import { - assertSatisfiesEvents, - assertSatisfiesInterface, - entryID, -} from './helpers' - -const compositeABIs = [ - AutomationRegistryFactory.abi, - AutomationRegistryLogicAFactory.abi, - AutomationRegistryLogicBFactory.abi, -] - -/** - * @dev because the keeper master interface is a composite of several different contracts, - * it is possible that an interface could be satisfied by functions across different - * contracts, and therefore not enforceable by the compiler directly. 
Instead, we use this - * test to assert that the master interface satisfies the constraints of an individual interface - */ -describe('IAutomationRegistryMaster2_2', () => { - it('is up to date', async () => { - const checksum = ethers.utils.id(compositeABIs.join('')) - const knownChecksum = fs - .readFileSync( - 'src/v0.8/automation/interfaces/v2_2/IAutomationRegistryMaster.sol', - ) - .toString() - .slice(17, 83) // checksum located at top of file - assert.equal( - checksum, - knownChecksum, - 'master interface is out of date - regenerate using "pnpm ts-node ./scripts/generate-automation-master-interface.ts"', - ) - }) - - it('is generated from composite contracts without competing definitions', async () => { - const sharedEntries = [ - ...ChainableFactory.abi, - ...AutomationRegistryBaseFactory.abi, - ] - const abiSet = new Set() - const sharedSet = new Set() - for (const entry of sharedEntries) { - sharedSet.add(entryID(entry)) - } - for (const abi of compositeABIs) { - for (const entry of abi) { - const id = entryID(entry) - if (!abiSet.has(id)) { - abiSet.add(id) - } else if (!sharedSet.has(id)) { - assert.fail( - `composite contracts contain duplicate entry: ${JSON.stringify( - entry, - )}`, - ) - } - } - } - }) - - it('satisfies the IAutomationRegistryConsumer interface', async () => { - assertSatisfiesInterface( - IAutomationRegistryMasterFactory.abi, - IAutomationRegistryConsumerFactory.abi, - ) - }) - - it('satisfies the MigratableKeeperRegistryInterface interface', async () => { - assertSatisfiesInterface( - IAutomationRegistryMasterFactory.abi, - MigratableKeeperRegistryInterfaceFactory.abi, - ) - }) - - it('satisfies the MigratableKeeperRegistryInterfaceV2 interface', async () => { - assertSatisfiesInterface( - IAutomationRegistryMasterFactory.abi, - MigratableKeeperRegistryInterfaceV2Factory.abi, - ) - }) - - // temporarily disable this test due to this update: https://github.com/smartcontractkit/chainlink/pull/14369/files#diff-6e79d46ea0ef204dea679ffd2a9f4dfccd090d8f405ba2d9bffad527d7b862c6L44 - it.skip('satisfies the OCR2Abstract interface', async () => { - assertSatisfiesInterface( - IAutomationRegistryMasterFactory.abi, - OCR2AbstractFactory.abi, - ) - }) - - it('satisfies the IAutomationV2Common interface', async () => { - assertSatisfiesInterface( - IAutomationRegistryMasterFactory.abi, - IAutomationV21PlusCommonFactory.abi, - ) - }) - - it('satisfies the IAutomationV2Common events', async () => { - assertSatisfiesEvents( - IAutomationRegistryMasterFactory.abi, - IAutomationV21PlusCommonFactory.abi, - ) - }) -}) diff --git a/contracts/test/v0.8/automation/LinkAvailableBalanceMonitor.test.ts b/contracts/test/v0.8/automation/LinkAvailableBalanceMonitor.test.ts deleted file mode 100644 index f63de3498b1..00000000000 --- a/contracts/test/v0.8/automation/LinkAvailableBalanceMonitor.test.ts +++ /dev/null @@ -1,1077 +0,0 @@ -import { ethers } from 'hardhat' -import chai, { assert, expect } from 'chai' -import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' -import { loadFixture } from '@nomicfoundation/hardhat-network-helpers' -import * as h from '../../test-helpers/helpers' -import { mineBlock } from '../../test-helpers/helpers' -import { IAggregatorProxy__factory as IAggregatorProxyFactory } from '../../../typechain/factories/IAggregatorProxy__factory' -import { ILinkAvailable__factory as ILinkAvailableFactory } from '../../../typechain/factories/ILinkAvailable__factory' -import { LinkAvailableBalanceMonitor, LinkToken } from '../../../typechain' -import 
{ BigNumber } from 'ethers' -import deepEqualInAnyOrder from 'deep-equal-in-any-order' -import { - deployMockContract, - MockContract, -} from '@ethereum-waffle/mock-contract' - -chai.use(deepEqualInAnyOrder) - -//////////////////////////////// GAS USAGE LIMITS - CHANGE WITH CAUTION ////////////////////////// -// // -// we try to keep gas usage under this amount (max is 5M) // -const TARGET_PERFORM_GAS_LIMIT = 2_000_000 -// we try to keep gas usage under this amount (max is 5M) the test is not a perfectly accurate // -// measurement of gas usage because it relies on mocks which may do fewer storage reads // -// therefore, we keep a healthy margin to avoid running over the limit! // -const TARGET_CHECK_GAS_LIMIT = 3_500_000 -// // -////////////////////////////////////////////////////////////////////////////////////////////////// -const INVALID_WATCHLIST_ERR = `InvalidWatchList` -const PAUSED_ERR = 'Pausable: paused' - -const zeroLINK = ethers.utils.parseEther('0') -const oneLINK = ethers.utils.parseEther('1') -const twoLINK = ethers.utils.parseEther('2') -const fourLINK = ethers.utils.parseEther('4') -const tenLINK = ethers.utils.parseEther('10') -const oneHundredLINK = ethers.utils.parseEther('100') - -const randAddr = () => ethers.Wallet.createRandom().address - -let labm: LinkAvailableBalanceMonitor -let lt: LinkToken -let owner: SignerWithAddress -let stranger: SignerWithAddress -let keeperRegistry: SignerWithAddress -let proxy1: MockContract -let proxy2: MockContract -let proxy3: MockContract -let proxy4: MockContract // leave this proxy / aggregator unconfigured for topUp() testing -let aggregator1: MockContract -let aggregator2: MockContract -let aggregator3: MockContract -let aggregator4: MockContract // leave this proxy / aggregator unconfigured for topUp() testing - -let directTarget1: MockContract // Contracts which are direct target of balance monitoring without proxy -let directTarget2: MockContract - -let watchListAddresses: string[] -let watchListMinBalances: BigNumber[] -let watchListTopUpAmounts: BigNumber[] -let watchListDstChainSelectors: number[] - -async function assertContractLinkBalances( - balance1: BigNumber, - balance2: BigNumber, - balance3: BigNumber, - balance4: BigNumber, - balance5: BigNumber, -) { - await h.assertLinkTokenBalance(lt, aggregator1.address, balance1, 'address 1') - await h.assertLinkTokenBalance(lt, aggregator2.address, balance2, 'address 2') - await h.assertLinkTokenBalance(lt, aggregator3.address, balance3, 'address 3') - await h.assertLinkTokenBalance( - lt, - directTarget1.address, - balance4, - 'address 4', - ) - await h.assertLinkTokenBalance( - lt, - directTarget2.address, - balance5, - 'address 5', - ) -} - -const setup = async () => { - const accounts = await ethers.getSigners() - owner = accounts[0] - stranger = accounts[1] - keeperRegistry = accounts[2] - - proxy1 = await deployMockContract(owner, IAggregatorProxyFactory.abi) - proxy2 = await deployMockContract(owner, IAggregatorProxyFactory.abi) - proxy3 = await deployMockContract(owner, IAggregatorProxyFactory.abi) - proxy4 = await deployMockContract(owner, IAggregatorProxyFactory.abi) - aggregator1 = await deployMockContract(owner, ILinkAvailableFactory.abi) - aggregator2 = await deployMockContract(owner, ILinkAvailableFactory.abi) - aggregator3 = await deployMockContract(owner, ILinkAvailableFactory.abi) - aggregator4 = await deployMockContract(owner, ILinkAvailableFactory.abi) - directTarget1 = await deployMockContract(owner, ILinkAvailableFactory.abi) - directTarget2 = await 
deployMockContract(owner, ILinkAvailableFactory.abi) - - await proxy1.deployed() - await proxy2.deployed() - await proxy3.deployed() - await proxy4.deployed() - await aggregator1.deployed() - await aggregator2.deployed() - await aggregator3.deployed() - await aggregator4.deployed() - await directTarget1.deployed() - await directTarget2.deployed() - - watchListAddresses = [ - proxy1.address, - proxy2.address, - proxy3.address, - directTarget1.address, - directTarget2.address, - ] - watchListMinBalances = [oneLINK, oneLINK, oneLINK, twoLINK, twoLINK] - watchListTopUpAmounts = [twoLINK, twoLINK, twoLINK, twoLINK, twoLINK] - watchListDstChainSelectors = [1, 2, 3, 4, 5] - - await proxy1.mock.aggregator.returns(aggregator1.address) - await proxy2.mock.aggregator.returns(aggregator2.address) - await proxy3.mock.aggregator.returns(aggregator3.address) - - await aggregator1.mock.linkAvailableForPayment.returns(0) - await aggregator2.mock.linkAvailableForPayment.returns(0) - await aggregator3.mock.linkAvailableForPayment.returns(0) - - await directTarget1.mock.linkAvailableForPayment.returns(0) - await directTarget2.mock.linkAvailableForPayment.returns(0) - - const labmFactory = await ethers.getContractFactory( - 'LinkAvailableBalanceMonitor', - owner, - ) - const ltFactory = await ethers.getContractFactory( - 'src/v0.8/shared/test/helpers/LinkTokenTestHelper.sol:LinkTokenTestHelper', - owner, - ) - - // New parameters needed by the constructor - const maxPerform = 5 - const maxCheck = 20 - const minWaitPeriodSeconds = 0 - const upkeepInterval = 10 - - lt = (await ltFactory.deploy()) as LinkToken - labm = await labmFactory.deploy( - owner.address, - lt.address, - minWaitPeriodSeconds, - maxPerform, - maxCheck, - upkeepInterval, - ) - await labm.deployed() - - for (let i = 1; i <= 4; i++) { - const recipient = await accounts[i].getAddress() - await lt.connect(owner).transfer(recipient, oneHundredLINK) - } - - const setTx = await labm - .connect(owner) - .setWatchList( - watchListAddresses, - watchListMinBalances, - watchListTopUpAmounts, - watchListDstChainSelectors, - ) - await setTx.wait() -} - -describe('LinkAvailableBalanceMonitor', () => { - beforeEach(async () => { - await loadFixture(setup) - }) - - describe('add funds', () => { - it('should allow anyone to add funds', async () => { - await lt.transfer(labm.address, oneLINK) - await lt.connect(stranger).transfer(labm.address, oneLINK) - }) - }) - - describe('setTopUpAmount()', () => { - it('configures the top-up amount', async () => { - await labm - .connect(owner) - .setTopUpAmount(directTarget1.address, BigNumber.from(100)) - const report = await labm.getAccountInfo(directTarget1.address) - assert.equal(report.topUpAmount.toString(), '100') - }) - - it('is only callable by the owner', async () => { - await expect( - labm.connect(stranger).setTopUpAmount(directTarget1.address, 100), - ).to.be.reverted - }) - }) - - describe('setMinBalance()', () => { - it('configures the min balance', async () => { - await labm - .connect(owner) - .setMinBalance(proxy1.address, BigNumber.from(100)) - const report = await labm.getAccountInfo(proxy1.address) - assert.equal(report.minBalance.toString(), '100') - }) - - it('reverts if address is not in the watchlist', async () => { - await expect(labm.connect(owner).setMinBalance(proxy4.address, 100)).to.be - .reverted - }) - - it('is only callable by the owner', async () => { - await expect(labm.connect(stranger).setMinBalance(proxy1.address, 100)).to - .be.reverted - }) - }) - - describe('setMaxPerform()', () 
=> { - it('configures the MaxPerform', async () => { - await labm.connect(owner).setMaxPerform(BigNumber.from(100)) - const report = await labm.getMaxPerform() - assert.equal(report.toString(), '100') - }) - - it('is only callable by the owner', async () => { - await expect(labm.connect(stranger).setMaxPerform(100)).to.be.reverted - }) - }) - - describe('setMaxCheck()', () => { - it('configures the MaxCheck', async () => { - await labm.connect(owner).setMaxCheck(BigNumber.from(100)) - const report = await labm.getMaxCheck() - assert.equal(report.toString(), '100') - }) - - it('is only callable by the owner', async () => { - await expect(labm.connect(stranger).setMaxCheck(100)).to.be.reverted - }) - }) - - describe('setUpkeepInterval()', () => { - it('configures the UpkeepInterval', async () => { - await labm.connect(owner).setUpkeepInterval(BigNumber.from(100)) - const report = await labm.getUpkeepInterval() - assert.equal(report.toString(), '100') - }) - - it('is only callable by the owner', async () => { - await expect(labm.connect(stranger).setUpkeepInterval(100)).to.be.reverted - }) - }) - - describe('withdraw()', () => { - beforeEach(async () => { - const tx = await lt.connect(owner).transfer(labm.address, oneLINK) - await tx.wait() - }) - - it('should allow the owner to withdraw', async () => { - const beforeBalance = await lt.balanceOf(owner.address) - const tx = await labm.connect(owner).withdraw(oneLINK, owner.address) - await tx.wait() - const afterBalance = await lt.balanceOf(owner.address) - assert.isTrue( - afterBalance.gt(beforeBalance), - 'balance did not increase after withdraw', - ) - }) - - it('should emit an event', async () => { - const tx = await labm.connect(owner).withdraw(oneLINK, owner.address) - await expect(tx) - .to.emit(labm, 'FundsWithdrawn') - .withArgs(oneLINK, owner.address) - }) - - it('should allow the owner to withdraw to anyone', async () => { - const beforeBalance = await lt.balanceOf(stranger.address) - const tx = await labm.connect(owner).withdraw(oneLINK, stranger.address) - await tx.wait() - const afterBalance = await lt.balanceOf(stranger.address) - assert.isTrue( - beforeBalance.add(oneLINK).eq(afterBalance), - 'balance did not increase after withdraw', - ) - }) - - it('should not allow strangers to withdraw', async () => { - const tx = labm.connect(stranger).withdraw(oneLINK, owner.address) - await expect(tx).to.be.reverted - }) - }) - - describe('pause() / unpause()', () => { - it('should allow owner to pause / unpause', async () => { - const pauseTx = await labm.connect(owner).pause() - await pauseTx.wait() - const unpauseTx = await labm.connect(owner).unpause() - await unpauseTx.wait() - }) - - it('should not allow strangers to pause / unpause', async () => { - const pauseTxStranger = labm.connect(stranger).pause() - await expect(pauseTxStranger).to.be.reverted - const pauseTxOwner = await labm.connect(owner).pause() - await pauseTxOwner.wait() - const unpauseTxStranger = labm.connect(stranger).unpause() - await expect(unpauseTxStranger).to.be.reverted - }) - }) - - describe('setWatchList() / addToWatchListOrDecommissionOrDecommission() / removeFromWatchlist() / getWatchList()', () => { - const watchAddress1 = randAddr() - const watchAddress2 = randAddr() - const watchAddress3 = randAddr() - - beforeEach(async () => { - // reset watchlist to empty before running these tests - await labm.connect(owner).setWatchList([], [], [], []) - const watchList = await labm.getWatchList() - assert.deepEqual(watchList, []) - }) - - it('should allow owner to 
adjust the watchlist', async () => { - // add first watchlist - await labm - .connect(owner) - .setWatchList([watchAddress1], [oneLINK], [oneLINK], [0]) - let watchList = await labm.getWatchList() - assert.deepEqual(watchList[0], watchAddress1) - // add more to watchlist - const tx = await labm - .connect(owner) - .setWatchList( - [watchAddress1, watchAddress2, watchAddress3], - [oneLINK, oneLINK, oneLINK], - [oneLINK, oneLINK, oneLINK], - [1, 2, 3], - ) - await tx.wait() - watchList = await labm.getWatchList() - assert.deepEqual(watchList, [watchAddress1, watchAddress2, watchAddress3]) - }) - - it('should not allow different length arrays in the watchlist', async () => { - const tx = labm - .connect(owner) - .setWatchList( - [watchAddress1, watchAddress2, watchAddress1], - [oneLINK, oneLINK], - [oneLINK, oneLINK], - [1, 2], - ) - await expect(tx).to.be.revertedWithCustomError( - labm, - INVALID_WATCHLIST_ERR, - ) - }) - - it('should not allow duplicates in the watchlist', async () => { - const tx = labm - .connect(owner) - .setWatchList( - [watchAddress1, watchAddress2, watchAddress1], - [oneLINK, oneLINK, oneLINK], - [oneLINK, oneLINK, oneLINK], - [1, 2, 3], - ) - await expect(tx) - .to.be.revertedWithCustomError(labm, 'DuplicateAddress') - .withArgs(watchAddress1) - }) - - it('should not allow strangers to set the watchlist', async () => { - const setTxStranger = labm - .connect(stranger) - .setWatchList([watchAddress1], [oneLINK], [oneLINK], [0]) - await expect(setTxStranger).to.be.reverted - }) - - it('should revert if any of the addresses are empty', async () => { - const tx = labm - .connect(owner) - .setWatchList( - [watchAddress1, ethers.constants.AddressZero], - [oneLINK, oneLINK], - [oneLINK, oneLINK], - [1, 2], - ) - await expect(tx).to.be.revertedWithCustomError( - labm, - INVALID_WATCHLIST_ERR, - ) - }) - - it('should allow owner to add multiple addresses with dstChainSelector 0 to the watchlist', async () => { - let tx = await labm - .connect(owner) - .addToWatchListOrDecommission(watchAddress1, 0) - await tx.wait - let watchList = await labm.getWatchList() - assert.deepEqual(watchList[0], watchAddress1) - - tx = await labm - .connect(owner) - .addToWatchListOrDecommission(watchAddress2, 0) - await tx.wait - watchList = await labm.getWatchList() - assert.deepEqual(watchList[0], watchAddress1) - assert.deepEqual(watchList[1], watchAddress2) - - tx = await labm - .connect(owner) - .addToWatchListOrDecommission(watchAddress3, 0) - await tx.wait - watchList = await labm.getWatchList() - assert.deepEqual(watchList[0], watchAddress1) - assert.deepEqual(watchList[1], watchAddress2) - assert.deepEqual(watchList[2], watchAddress3) - }) - - it('should allow owner to add only one address with an unique non-zero dstChainSelector 0 to the watchlist', async () => { - let tx = await labm - .connect(owner) - .addToWatchListOrDecommission(watchAddress1, 1) - await tx.wait - let watchList = await labm.getWatchList() - assert.deepEqual(watchList[0], watchAddress1) - - // 1 is active - let report = await labm.getAccountInfo(watchAddress1) - assert.isTrue(report.isActive) - - tx = await labm - .connect(owner) - .addToWatchListOrDecommission(watchAddress2, 1) - await tx.wait - watchList = await labm.getWatchList() - assert.deepEqual(watchList[0], watchAddress2) - - // 2 is active, 1 should be false - report = await labm.getAccountInfo(watchAddress2) - assert.isTrue(report.isActive) - report = await labm.getAccountInfo(watchAddress1) - assert.isFalse(report.isActive) - - tx = await labm - 
.connect(owner) - .addToWatchListOrDecommission(watchAddress3, 1) - await tx.wait - watchList = await labm.getWatchList() - assert.deepEqual(watchList[0], watchAddress3) - - // 3 is active, 1 and 2 should be false - report = await labm.getAccountInfo(watchAddress3) - assert.isTrue(report.isActive) - report = await labm.getAccountInfo(watchAddress2) - assert.isFalse(report.isActive) - report = await labm.getAccountInfo(watchAddress1) - assert.isFalse(report.isActive) - }) - - it('should not add address 0 to the watchlist', async () => { - await labm - .connect(owner) - .addToWatchListOrDecommission(ethers.constants.AddressZero, 1) - expect(await labm.getWatchList()).to.not.contain( - ethers.constants.AddressZero, - ) - }) - - it('should not allow stangers to add addresses to the watchlist', async () => { - await expect( - labm.connect(stranger).addToWatchListOrDecommission(watchAddress1, 1), - ).to.be.reverted - }) - - it('should allow owner to remove addresses from the watchlist', async () => { - const tx = await labm - .connect(owner) - .addToWatchListOrDecommission(watchAddress1, 1) - await tx.wait - let watchList = await labm.getWatchList() - assert.deepEqual(watchList[0], watchAddress1) - let report = await labm.getAccountInfo(watchAddress1) - assert.isTrue(report.isActive) - - // remove address - await labm.connect(owner).removeFromWatchList(watchAddress1) - - // address should be false - report = await labm.getAccountInfo(watchAddress1) - assert.isFalse(report.isActive) - - watchList = await labm.getWatchList() - assert.deepEqual(watchList, []) - }) - - it('should allow only one address per dstChainSelector', async () => { - // add address1 - await labm.connect(owner).addToWatchListOrDecommission(watchAddress1, 1) - expect(await labm.getWatchList()).to.contain(watchAddress1) - - // add address2 - await labm.connect(owner).addToWatchListOrDecommission(watchAddress2, 1) - - // only address2 has to be in the watchlist - const watchlist = await labm.getWatchList() - expect(watchlist).to.not.contain(watchAddress1) - expect(watchlist).to.contain(watchAddress2) - }) - - it('should delete the onRamp address on a zero-address with same dstChainSelector', async () => { - // add address1 - await labm.connect(owner).addToWatchListOrDecommission(watchAddress1, 1) - expect(await labm.getWatchList()).to.contain(watchAddress1) - - // simulates an onRampSet(zeroAddress, same dstChainSelector) - await labm - .connect(owner) - .addToWatchListOrDecommission(ethers.constants.AddressZero, 1) - - // address1 should be cleaned - const watchlist = await labm.getWatchList() - expect(watchlist).to.not.contain(watchAddress1) - assert.deepEqual(watchlist, []) - }) - }) - - describe('checkUpkeep() / sampleUnderfundedAddresses() [ @skip-coverage ]', () => { - it('should return list of address that are underfunded', async () => { - const fundTx = await lt - .connect(owner) - .transfer(labm.address, oneHundredLINK) - await fundTx.wait() - - await labm.setWatchList( - watchListAddresses, - watchListMinBalances, - watchListTopUpAmounts, - watchListDstChainSelectors, - ) - - const [should, payload] = await labm.checkUpkeep('0x') - assert.isTrue(should) - let [addresses] = ethers.utils.defaultAbiCoder.decode( - ['address[]'], - payload, - ) - - expect(addresses).to.deep.equalInAnyOrder(watchListAddresses) - addresses = await labm.sampleUnderfundedAddresses() - expect(addresses).to.deep.equalInAnyOrder(watchListAddresses) - }) - - it('should return false because the monitor is underfunded', async () => { - // it needs 10 
LINKs to fund all 5 upkeeps, but it only has 8 LINKs - const fundTx = await lt - .connect(owner) - .transfer(labm.address, fourLINK.add(fourLINK)) - await fundTx.wait() - - await labm.setWatchList( - watchListAddresses, - watchListMinBalances, - watchListTopUpAmounts, - watchListDstChainSelectors, - ) - - const [should, _] = await labm.checkUpkeep('0x') - assert.isFalse(should) - }) - - it('should omit aggregators that have sufficient funding', async () => { - const fundTx = await lt.connect(owner).transfer( - labm.address, - oneHundredLINK, // enough for anything that needs funding - ) - await fundTx.wait() - - await labm.setWatchList( - [aggregator2.address, directTarget1.address, directTarget2.address], - [oneLINK, twoLINK, twoLINK], - [oneLINK, oneLINK, oneLINK], - [1, 2, 3], - ) - - // all of them are underfunded, return 3 - await aggregator2.mock.linkAvailableForPayment.returns(zeroLINK) - await directTarget1.mock.linkAvailableForPayment.returns(zeroLINK) - await directTarget2.mock.linkAvailableForPayment.returns(zeroLINK) - - let addresses = await labm.sampleUnderfundedAddresses() - expect(addresses).to.deep.equalInAnyOrder([ - aggregator2.address, - directTarget1.address, - directTarget2.address, - ]) - - await aggregator2.mock.linkAvailableForPayment.returns(oneLINK) // aggregator2 is enough funded - await directTarget1.mock.linkAvailableForPayment.returns(oneLINK) // directTarget1 is NOT enough funded - await directTarget2.mock.linkAvailableForPayment.returns(oneLINK) // directTarget2 is NOT funded - addresses = await labm.sampleUnderfundedAddresses() - expect(addresses).to.deep.equalInAnyOrder([ - directTarget1.address, - directTarget2.address, - ]) - - await directTarget1.mock.linkAvailableForPayment.returns(tenLINK) - addresses = await labm.sampleUnderfundedAddresses() - expect(addresses).to.deep.equalInAnyOrder([directTarget2.address]) - - await directTarget2.mock.linkAvailableForPayment.returns(tenLINK) - addresses = await labm.sampleUnderfundedAddresses() - expect(addresses).to.deep.equalInAnyOrder([]) - }) - - it('should revert when paused', async () => { - const tx = await labm.connect(owner).pause() - await tx.wait() - const ethCall = labm.checkUpkeep('0x') - await expect(ethCall).to.be.revertedWith(PAUSED_ERR) - }) - - context('with a large set of proxies', async () => { - // in this test, we cheat a little bit and point each proxy to the same aggregator, - // which helps cut down on test time - let MAX_PERFORM: number - let MAX_CHECK: number - let proxyAddresses: string[] - let minBalances: BigNumber[] - let topUpAmount: BigNumber[] - let aggregators: MockContract[] - let dstChainSelectors: number[] - - beforeEach(async () => { - MAX_PERFORM = await labm.getMaxPerform() - MAX_CHECK = await labm.getMaxCheck() - proxyAddresses = [] - minBalances = [] - topUpAmount = [] - aggregators = [] - dstChainSelectors = [] - const numAggregators = MAX_CHECK + 50 - for (let idx = 0; idx < numAggregators; idx++) { - const proxy = await deployMockContract( - owner, - IAggregatorProxyFactory.abi, - ) - const aggregator = await deployMockContract( - owner, - ILinkAvailableFactory.abi, - ) - await proxy.mock.aggregator.returns(aggregator.address) - await aggregator.mock.linkAvailableForPayment.returns(0) - proxyAddresses.push(proxy.address) - minBalances.push(oneLINK) - topUpAmount.push(oneLINK) - aggregators.push(aggregator) - dstChainSelectors.push(0) - } - await labm.setWatchList( - proxyAddresses, - minBalances, - topUpAmount, - dstChainSelectors, - ) - const watchlist = await 
labm.getWatchList() - expect(watchlist).to.deep.equalInAnyOrder(proxyAddresses) - assert.equal(watchlist.length, minBalances.length) - }) - - it('should not include more than MAX_PERFORM addresses', async () => { - const addresses = await labm.sampleUnderfundedAddresses() - expect(addresses.length).to.be.lessThanOrEqual(MAX_PERFORM) - }) - - it('should sample from the list of addresses pseudorandomly', async () => { - const firstAddress: string[] = [] - for (let idx = 0; idx < 10; idx++) { - const addresses = await labm.sampleUnderfundedAddresses() - assert.equal(addresses.length, MAX_PERFORM) - assert.equal( - new Set(addresses).size, - MAX_PERFORM, - 'duplicate address found', - ) - firstAddress.push(addresses[0]) - await mineBlock(ethers.provider) - } - assert( - new Set(firstAddress).size > 1, - 'sample did not shuffle starting index', - ) - }) - - it('can check MAX_CHECK upkeeps within the allotted gas limit', async () => { - for (const aggregator of aggregators) { - // here we make no aggregators eligible for funding, requiring the function to - // traverse the whole list - await aggregator.mock.linkAvailableForPayment.returns(tenLINK) - } - await labm.checkUpkeep('0x', { gasLimit: TARGET_CHECK_GAS_LIMIT }) - }) - }) - }) - - describe('performUpkeep()', () => { - let validPayload: string - - beforeEach(async () => { - validPayload = ethers.utils.defaultAbiCoder.encode( - ['address[]'], - [watchListAddresses], - ) - await labm - .connect(owner) - .setWatchList( - watchListAddresses, - watchListMinBalances, - watchListTopUpAmounts, - watchListDstChainSelectors, - ) - }) - - it('should revert when paused', async () => { - await labm.connect(owner).pause() - const performTx = labm.connect(keeperRegistry).performUpkeep(validPayload) - await expect(performTx).to.be.revertedWith(PAUSED_ERR) - }) - - it('should fund the appropriate addresses', async () => { - await aggregator1.mock.linkAvailableForPayment.returns(zeroLINK) - await aggregator2.mock.linkAvailableForPayment.returns(zeroLINK) - await aggregator3.mock.linkAvailableForPayment.returns(zeroLINK) - await directTarget1.mock.linkAvailableForPayment.returns(zeroLINK) - await directTarget2.mock.linkAvailableForPayment.returns(zeroLINK) - - const fundTx = await lt.connect(owner).transfer(labm.address, tenLINK) - await fundTx.wait() - - await h.assertLinkTokenBalance(lt, aggregator1.address, zeroLINK) - await h.assertLinkTokenBalance(lt, aggregator2.address, zeroLINK) - await h.assertLinkTokenBalance(lt, aggregator3.address, zeroLINK) - await h.assertLinkTokenBalance(lt, directTarget1.address, zeroLINK) - await h.assertLinkTokenBalance(lt, directTarget2.address, zeroLINK) - - const performTx = await labm - .connect(keeperRegistry) - .performUpkeep(validPayload, { gasLimit: 1_500_000 }) - await performTx.wait() - - await h.assertLinkTokenBalance(lt, aggregator1.address, twoLINK) - await h.assertLinkTokenBalance(lt, aggregator2.address, twoLINK) - await h.assertLinkTokenBalance(lt, aggregator3.address, twoLINK) - await h.assertLinkTokenBalance(lt, directTarget1.address, twoLINK) - await h.assertLinkTokenBalance(lt, directTarget2.address, twoLINK) - }) - - it('can handle MAX_PERFORM proxies within gas limit', async () => { - const MAX_PERFORM = await labm.getMaxPerform() - const proxyAddresses = [] - const minBalances = [] - const topUpAmount = [] - const dstChainSelectors = [] - for (let idx = 0; idx < MAX_PERFORM; idx++) { - const proxy = await deployMockContract( - owner, - IAggregatorProxyFactory.abi, - ) - const aggregator = await 
deployMockContract( - owner, - ILinkAvailableFactory.abi, - ) - await proxy.mock.aggregator.returns(aggregator.address) - await aggregator.mock.linkAvailableForPayment.returns(0) - proxyAddresses.push(proxy.address) - minBalances.push(oneLINK) - topUpAmount.push(oneLINK) - dstChainSelectors.push(0) - } - await labm.setWatchList( - proxyAddresses, - minBalances, - topUpAmount, - dstChainSelectors, - ) - const watchlist = await labm.getWatchList() - expect(watchlist).to.deep.equalInAnyOrder(proxyAddresses) - assert.equal(watchlist.length, minBalances.length) - - // add funds - const wl = await labm.getWatchList() - const fundsNeeded = BigNumber.from(0) - for (let idx = 0; idx < wl.length; idx++) { - const targetInfo = await labm.getAccountInfo(wl[idx]) - const targetTopUpAmount = targetInfo.topUpAmount - fundsNeeded.add(targetTopUpAmount) - } - await lt.connect(owner).transfer(labm.address, fundsNeeded) - - // encode payload - const payload = ethers.utils.defaultAbiCoder.encode( - ['address[]'], - [proxyAddresses], - ) - - // do the thing - await labm - .connect(keeperRegistry) - .performUpkeep(payload, { gasLimit: TARGET_PERFORM_GAS_LIMIT }) - }) - }) - - describe('topUp()', () => { - it('should revert topUp address(0)', async () => { - const tx = await labm.connect(owner).topUp([ethers.constants.AddressZero]) - await expect(tx).to.emit(labm, 'TopUpBlocked') - }) - - context('when not paused', () => { - it('should be callable by anyone', async () => { - const users = [owner, keeperRegistry, stranger] - for (let idx = 0; idx < users.length; idx++) { - const user = users[idx] - await labm.connect(user).topUp([]) - } - }) - }) - - context('when paused', () => { - it('should be callable by no one', async () => { - await labm.connect(owner).pause() - const users = [owner, keeperRegistry, stranger] - for (let idx = 0; idx < users.length; idx++) { - const user = users[idx] - const tx = labm.connect(user).topUp([]) - await expect(tx).to.be.revertedWith(PAUSED_ERR) - } - }) - }) - - context('when fully funded', () => { - beforeEach(async () => { - await lt.connect(owner).transfer(labm.address, tenLINK) - await assertContractLinkBalances( - zeroLINK, - zeroLINK, - zeroLINK, - zeroLINK, - zeroLINK, - ) - }) - - it('should fund the appropriate addresses', async () => { - const ai1 = await labm.getAccountInfo(proxy1.address) - assert.equal(0, ai1.lastTopUpTimestamp.toNumber()) - const ai4 = await labm.getAccountInfo(directTarget1.address) - assert.equal(0, ai4.lastTopUpTimestamp.toNumber()) - - const tx = await labm.connect(keeperRegistry).topUp(watchListAddresses) - - await aggregator1.mock.linkAvailableForPayment.returns(twoLINK) - await aggregator2.mock.linkAvailableForPayment.returns(twoLINK) - await aggregator3.mock.linkAvailableForPayment.returns(twoLINK) - await directTarget1.mock.linkAvailableForPayment.returns(twoLINK) - await directTarget2.mock.linkAvailableForPayment.returns(twoLINK) - - await expect(tx) - .to.emit(labm, 'TopUpSucceeded') - .withArgs(aggregator1.address, twoLINK) - assert.equal( - (await lt.balanceOf(aggregator1.address)).toBigInt(), - twoLINK.toBigInt(), - ) - const targetInfo1 = await labm.getAccountInfo(proxy1.address) - assert.notEqual(0, targetInfo1.lastTopUpTimestamp.toNumber()) - await expect(tx) - .to.emit(labm, 'TopUpSucceeded') - .withArgs(aggregator2.address, twoLINK) - await expect(tx) - .to.emit(labm, 'TopUpSucceeded') - .withArgs(aggregator3.address, twoLINK) - await expect(tx) - .to.emit(labm, 'TopUpSucceeded') - .withArgs(directTarget1.address, twoLINK) - 
assert.equal( - (await lt.balanceOf(directTarget1.address)).toBigInt(), - twoLINK.toBigInt(), - ) - const targetInfo4 = await labm.getAccountInfo(directTarget1.address) - assert.notEqual(0, targetInfo4.lastTopUpTimestamp.toNumber()) - await expect(tx) - .to.emit(labm, 'TopUpSucceeded') - .withArgs(directTarget2.address, twoLINK) - }) - - it('should only fund the addresses provided', async () => { - await labm - .connect(keeperRegistry) - .topUp([proxy1.address, directTarget1.address]) - - await aggregator1.mock.linkAvailableForPayment.returns(twoLINK) - await aggregator2.mock.linkAvailableForPayment.returns(zeroLINK) - await aggregator3.mock.linkAvailableForPayment.returns(zeroLINK) - await directTarget1.mock.linkAvailableForPayment.returns(twoLINK) - await directTarget2.mock.linkAvailableForPayment.returns(zeroLINK) - }) - - it('should skip un-approved addresses', async () => { - await labm - .connect(owner) - .setWatchList( - [proxy1.address, directTarget1.address], - [oneLINK, oneLINK], - [oneLINK, oneLINK], - [1, 2], - ) - const tx = await labm - .connect(keeperRegistry) - .topUp([ - proxy1.address, - proxy2.address, - proxy3.address, - directTarget1.address, - directTarget2.address, - ]) - - await h.assertLinkTokenBalance(lt, aggregator1.address, oneLINK) - await h.assertLinkTokenBalance(lt, aggregator2.address, zeroLINK) - await h.assertLinkTokenBalance(lt, aggregator3.address, zeroLINK) - await h.assertLinkTokenBalance(lt, directTarget1.address, oneLINK) - await h.assertLinkTokenBalance(lt, directTarget2.address, zeroLINK) - - await expect(tx) - .to.emit(labm, 'TopUpSucceeded') - .withArgs(aggregator1.address, oneLINK) - const targetInfo1 = await labm.getAccountInfo(proxy1.address) - assert.notEqual(0, targetInfo1.lastTopUpTimestamp.toNumber()) - - await expect(tx) - .to.emit(labm, 'TopUpSucceeded') - .withArgs(directTarget1.address, oneLINK) - await expect(tx).to.emit(labm, 'TopUpBlocked').withArgs(proxy2.address) - await expect(tx).to.emit(labm, 'TopUpBlocked').withArgs(proxy3.address) - await expect(tx) - .to.emit(labm, 'TopUpBlocked') - .withArgs(directTarget2.address) - const targetInfo5 = await labm.getAccountInfo(directTarget2.address) - assert.equal(0, targetInfo5.lastTopUpTimestamp.toNumber()) - }) - - it('should skip an address if the proxy is invalid and it is not a direct target', async () => { - await labm - .connect(owner) - .setWatchList( - [proxy1.address, proxy4.address], - [oneLINK, oneLINK], - [oneLINK, oneLINK], - [1, 2], - ) - const tx = await labm - .connect(keeperRegistry) - .topUp([proxy1.address, proxy4.address]) - await expect(tx) - .to.emit(labm, 'TopUpSucceeded') - .withArgs(aggregator1.address, oneLINK) - await expect(tx).to.emit(labm, 'TopUpBlocked').withArgs(proxy4.address) - }) - - it('should skip an address if the aggregator is invalid', async () => { - await proxy4.mock.aggregator.returns(aggregator4.address) - await labm - .connect(owner) - .setWatchList( - [proxy1.address, proxy4.address], - [oneLINK, oneLINK], - [oneLINK, oneLINK], - [1, 2], - ) - const tx = await labm - .connect(keeperRegistry) - .topUp([proxy1.address, proxy4.address]) - await expect(tx) - .to.emit(labm, 'TopUpSucceeded') - .withArgs(aggregator1.address, oneLINK) - await expect(tx).to.emit(labm, 'TopUpBlocked').withArgs(proxy4.address) - }) - - it('should skip an address if the aggregator has sufficient funding', async () => { - await proxy4.mock.aggregator.returns(aggregator4.address) - await aggregator4.mock.linkAvailableForPayment.returns(tenLINK) - await labm - 
.connect(owner) - .setWatchList( - [proxy1.address, proxy4.address], - [oneLINK, oneLINK], - [oneLINK, oneLINK], - [1, 2], - ) - const tx = await labm - .connect(keeperRegistry) - .topUp([proxy1.address, proxy4.address]) - await expect(tx) - .to.emit(labm, 'TopUpSucceeded') - .withArgs(aggregator1.address, oneLINK) - await expect(tx).to.emit(labm, 'TopUpBlocked').withArgs(proxy4.address) - }) - - it('should skip an address if the direct target has sufficient funding', async () => { - await directTarget1.mock.linkAvailableForPayment.returns(tenLINK) - await labm - .connect(owner) - .setWatchList( - [proxy1.address, directTarget1.address], - [oneLINK, oneLINK], - [oneLINK, oneLINK], - [1, 2], - ) - const tx = await labm - .connect(keeperRegistry) - .topUp([proxy1.address, directTarget1.address]) - await expect(tx) - .to.emit(labm, 'TopUpSucceeded') - .withArgs(aggregator1.address, oneLINK) - assert.equal( - (await lt.balanceOf(aggregator1.address)).toBigInt(), - oneLINK.toBigInt(), - ) - await expect(tx) - .to.emit(labm, 'TopUpBlocked') - .withArgs(directTarget1.address) - }) - }) - - context('when partially funded', () => { - it('should fund as many addresses as possible', async () => { - await lt.connect(owner).transfer( - labm.address, - fourLINK, // only enough LINK to fund 2 addresses - ) - - await aggregator1.mock.linkAvailableForPayment.returns(twoLINK) - await aggregator2.mock.linkAvailableForPayment.returns(twoLINK) - await aggregator3.mock.linkAvailableForPayment.returns(zeroLINK) - await directTarget1.mock.linkAvailableForPayment.returns(zeroLINK) - await directTarget2.mock.linkAvailableForPayment.returns(zeroLINK) - - const tx = await labm.connect(keeperRegistry).topUp(watchListAddresses) - await expect(tx) - .to.emit(labm, 'TopUpSucceeded') - .withArgs(aggregator3.address, twoLINK) - await expect(tx) - .to.emit(labm, 'TopUpSucceeded') - .withArgs(directTarget1.address, twoLINK) - assert.equal( - (await lt.balanceOf(aggregator3.address)).toBigInt(), - twoLINK.toBigInt(), - ) - assert.equal( - (await lt.balanceOf(directTarget1.address)).toBigInt(), - twoLINK.toBigInt(), - ) - }) - }) - }) -}) diff --git a/contracts/test/v0.8/automation/UpkeepBalanceMonitor.test.ts b/contracts/test/v0.8/automation/UpkeepBalanceMonitor.test.ts deleted file mode 100644 index 0ee244130ab..00000000000 --- a/contracts/test/v0.8/automation/UpkeepBalanceMonitor.test.ts +++ /dev/null @@ -1,402 +0,0 @@ -import { ethers } from 'hardhat' -import { expect } from 'chai' -import type { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers' -import { randomAddress } from '../../test-helpers/helpers' -import { loadFixture } from '@nomicfoundation/hardhat-network-helpers' -import { IKeeperRegistryMaster__factory as RegistryFactory } from '../../../typechain/factories/IKeeperRegistryMaster__factory' -import { IAutomationForwarder__factory as ForwarderFactory } from '../../../typechain/factories/IAutomationForwarder__factory' -import { UpkeepBalanceMonitor } from '../../../typechain/UpkeepBalanceMonitor' -import { LinkToken } from '../../../typechain/LinkToken' -import { BigNumber } from 'ethers' -import { - deployMockContract, - MockContract, -} from '@ethereum-waffle/mock-contract' - -let owner: SignerWithAddress -let stranger: SignerWithAddress -let registry: MockContract -let registry2: MockContract -let forwarder: MockContract -let linkToken: LinkToken -let upkeepBalanceMonitor: UpkeepBalanceMonitor - -const setup = async () => { - const accounts = await ethers.getSigners() - owner = accounts[0] - stranger 
= accounts[1] - - const ltFactory = await ethers.getContractFactory( - 'src/v0.8/shared/test/helpers/LinkTokenTestHelper.sol:LinkTokenTestHelper', - owner, - ) - linkToken = (await ltFactory.deploy()) as LinkToken - const bmFactory = await ethers.getContractFactory( - 'UpkeepBalanceMonitor', - owner, - ) - upkeepBalanceMonitor = await bmFactory.deploy(linkToken.address, { - maxBatchSize: 10, - minPercentage: 120, - targetPercentage: 300, - maxTopUpAmount: ethers.utils.parseEther('100'), - }) - registry = await deployMockContract(owner, RegistryFactory.abi) - registry2 = await deployMockContract(owner, RegistryFactory.abi) - forwarder = await deployMockContract(owner, ForwarderFactory.abi) - await forwarder.mock.getRegistry.returns(registry.address) - await upkeepBalanceMonitor.setForwarder(forwarder.address) - await linkToken - .connect(owner) - .transfer(upkeepBalanceMonitor.address, ethers.utils.parseEther('10000')) - await upkeepBalanceMonitor - .connect(owner) - .setWatchList(registry.address, [0, 1, 2, 3, 4, 5, 6, 7, 8]) - await upkeepBalanceMonitor - .connect(owner) - .setWatchList(registry2.address, [9, 10, 11]) - for (let i = 0; i < 9; i++) { - await registry.mock.getMinBalance.withArgs(i).returns(100) - await registry.mock.getBalance.withArgs(i).returns(121) // all upkeeps are sufficiently funded - } - for (let i = 9; i < 12; i++) { - await registry2.mock.getMinBalance.withArgs(i).returns(100) - await registry2.mock.getBalance.withArgs(i).returns(121) // all upkeeps are sufficiently funded - } -} - -describe('UpkeepBalanceMonitor', () => { - beforeEach(async () => { - await loadFixture(setup) - }) - - describe('constructor()', () => { - it('should set the initial values correctly', async () => { - const config = await upkeepBalanceMonitor.getConfig() - expect(config.maxBatchSize).to.equal(10) - expect(config.minPercentage).to.equal(120) - expect(config.targetPercentage).to.equal(300) - expect(config.maxTopUpAmount).to.equal(ethers.utils.parseEther('100')) - }) - }) - - describe('setConfig()', () => { - const newConfig = { - maxBatchSize: 100, - minPercentage: 150, - targetPercentage: 500, - maxTopUpAmount: 1, - } - - it('should set config correctly', async () => { - await upkeepBalanceMonitor.connect(owner).setConfig(newConfig) - const config = await upkeepBalanceMonitor.getConfig() - expect(config.maxBatchSize).to.equal(newConfig.maxBatchSize) - expect(config.minPercentage).to.equal(newConfig.minPercentage) - expect(config.targetPercentage).to.equal(newConfig.targetPercentage) - expect(config.maxTopUpAmount).to.equal(newConfig.maxTopUpAmount) - }) - - it('cannot be called by a non-owner', async () => { - await expect( - upkeepBalanceMonitor.connect(stranger).setConfig(newConfig), - ).to.be.revertedWith('Only callable by owner') - }) - - it('should emit an event', async () => { - await expect( - upkeepBalanceMonitor.connect(owner).setConfig(newConfig), - ).to.emit(upkeepBalanceMonitor, 'ConfigSet') - }) - }) - - describe('setForwarder()', () => { - const newForwarder = randomAddress() - - it('should set the forwarder correctly', async () => { - await upkeepBalanceMonitor.connect(owner).setForwarder(newForwarder) - const forwarderAddress = await upkeepBalanceMonitor.getForwarder() - expect(forwarderAddress).to.equal(newForwarder) - }) - - it('cannot be called by a non-owner', async () => { - await expect( - upkeepBalanceMonitor.connect(stranger).setForwarder(randomAddress()), - ).to.be.revertedWith('Only callable by owner') - }) - - it('should emit an event', async () => { - await 
expect( - upkeepBalanceMonitor.connect(owner).setForwarder(newForwarder), - ) - .to.emit(upkeepBalanceMonitor, 'ForwarderSet') - .withArgs(newForwarder) - }) - }) - - describe('setWatchList()', () => { - const newWatchList = [ - BigNumber.from(1), - BigNumber.from(2), - BigNumber.from(10), - ] - - it('should add addresses to the watchlist', async () => { - await upkeepBalanceMonitor - .connect(owner) - .setWatchList(registry.address, newWatchList) - const [_, upkeepIDs] = await upkeepBalanceMonitor.getWatchList() - expect(upkeepIDs[0]).to.deep.equal(newWatchList) - }) - - it('cannot be called by a non-owner', async () => { - await expect( - upkeepBalanceMonitor - .connect(stranger) - .setWatchList(registry.address, [1, 2, 3]), - ).to.be.revertedWith('Only callable by owner') - }) - - it('should emit an event', async () => { - await expect( - upkeepBalanceMonitor - .connect(owner) - .setWatchList(registry.address, newWatchList), - ) - .to.emit(upkeepBalanceMonitor, 'WatchListSet') - .withArgs(registry.address) - }) - }) - - describe('withdraw()', () => { - const payee = randomAddress() - const withdrawAmount = 100 - - it('should withdraw funds to a payee', async () => { - const initialBalance = await linkToken.balanceOf( - upkeepBalanceMonitor.address, - ) - await upkeepBalanceMonitor.connect(owner).withdraw(withdrawAmount, payee) - const finalBalance = await linkToken.balanceOf( - upkeepBalanceMonitor.address, - ) - const payeeBalance = await linkToken.balanceOf(payee) - expect(finalBalance).to.equal(initialBalance.sub(withdrawAmount)) - expect(payeeBalance).to.equal(withdrawAmount) - }) - - it('cannot be called by a non-owner', async () => { - await expect( - upkeepBalanceMonitor.connect(stranger).withdraw(withdrawAmount, payee), - ).to.be.revertedWith('Only callable by owner') - }) - - it('should emit an event', async () => { - await expect( - upkeepBalanceMonitor.connect(owner).withdraw(withdrawAmount, payee), - ) - .to.emit(upkeepBalanceMonitor, 'FundsWithdrawn') - .withArgs(100, payee) - }) - }) - - describe('pause() and unpause()', () => { - it('should pause and unpause the contract', async () => { - await upkeepBalanceMonitor.connect(owner).pause() - expect(await upkeepBalanceMonitor.paused()).to.be.true - await upkeepBalanceMonitor.connect(owner).unpause() - expect(await upkeepBalanceMonitor.paused()).to.be.false - }) - - it('cannot be called by a non-owner', async () => { - await expect( - upkeepBalanceMonitor.connect(stranger).pause(), - ).to.be.revertedWith('Only callable by owner') - await upkeepBalanceMonitor.connect(owner).pause() - await expect( - upkeepBalanceMonitor.connect(stranger).unpause(), - ).to.be.revertedWith('Only callable by owner') - }) - }) - - describe('checkUpkeep() / getUnderfundedUpkeeps()', () => { - it('should find the underfunded upkeeps', async () => { - let [upkeepIDs, registries, topUpAmounts] = - await upkeepBalanceMonitor.getUnderfundedUpkeeps() - expect(upkeepIDs.length).to.equal(0) - expect(registries.length).to.equal(0) - expect(topUpAmounts.length).to.equal(0) - let [upkeepNeeded, performData] = - await upkeepBalanceMonitor.checkUpkeep('0x') - expect(upkeepNeeded).to.be.false - expect(performData).to.equal('0x') - // update the balance for some upkeeps - await registry.mock.getBalance.withArgs(2).returns(120) - await registry.mock.getBalance.withArgs(4).returns(15) - await registry.mock.getBalance.withArgs(5).returns(0) - ;[upkeepIDs, registries, topUpAmounts] = - await upkeepBalanceMonitor.getUnderfundedUpkeeps() - expect(upkeepIDs.map((v) => 
v.toNumber())).to.deep.equal([2, 4, 5]) - expect(registries).to.deep.equal([ - registry.address, - registry.address, - registry.address, - ]) - expect(topUpAmounts.map((v) => v.toNumber())).to.deep.equal([ - 180, 285, 300, - ]) - ;[upkeepNeeded, performData] = - await upkeepBalanceMonitor.checkUpkeep('0x') - expect(upkeepNeeded).to.be.true - expect(performData).to.equal( - ethers.utils.defaultAbiCoder.encode( - ['uint256[]', 'address[]', 'uint256[]'], - [ - [2, 4, 5], - [registry.address, registry.address, registry.address], - [180, 285, 300], - ], - ), - ) - // update all to need funding - for (let i = 0; i < 9; i++) { - await registry.mock.getBalance.withArgs(i).returns(0) - } - for (let i = 9; i < 12; i++) { - await registry2.mock.getBalance.withArgs(i).returns(0) - } - // only the max batch size are included in the list - ;[upkeepIDs, registries, topUpAmounts] = - await upkeepBalanceMonitor.getUnderfundedUpkeeps() - expect(upkeepIDs.length).to.equal(10) - expect(topUpAmounts.length).to.equal(10) - expect(upkeepIDs.map((v) => v.toNumber())).to.deep.equal([ - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, - ]) - expect(registries).to.deep.equal([ - ...Array(9).fill(registry.address), - registry2.address, - ]) - expect(topUpAmounts.map((v) => v.toNumber())).to.deep.equal([ - ...Array(10).fill(300), - ]) - // update the balance for some upkeeps - await registry.mock.getBalance.withArgs(0).returns(300) - await registry.mock.getBalance.withArgs(5).returns(300) - ;[upkeepIDs, registries, topUpAmounts] = - await upkeepBalanceMonitor.getUnderfundedUpkeeps() - expect(upkeepIDs.length).to.equal(10) - expect(topUpAmounts.length).to.equal(10) - expect(upkeepIDs.map((v) => v.toNumber())).to.deep.equal([ - 1, 2, 3, 4, 6, 7, 8, 9, 10, 11, - ]) - expect(registries).to.deep.equal([ - ...Array(7).fill(registry.address), - ...Array(3).fill(registry2.address), - ]) - expect(topUpAmounts.map((v) => v.toNumber())).to.deep.equal([ - ...Array(10).fill(300), - ]) - }) - }) - - describe('topUp()', () => { - beforeEach(async () => { - await registry.mock.onTokenTransfer - .withArgs( - upkeepBalanceMonitor.address, - 100, - ethers.utils.defaultAbiCoder.encode(['uint256'], [1]), - ) - .returns() - await registry.mock.onTokenTransfer - .withArgs( - upkeepBalanceMonitor.address, - 50, - ethers.utils.defaultAbiCoder.encode(['uint256'], [7]), - ) - .returns() - }) - - it('cannot be called by a non-owner', async () => { - await expect( - upkeepBalanceMonitor.connect(stranger).topUp([], [], []), - ).to.be.revertedWithCustomError( - upkeepBalanceMonitor, - 'OnlyForwarderOrOwner', - ) - }) - - it('should revert if the contract is paused', async () => { - await upkeepBalanceMonitor.connect(owner).pause() - await expect( - upkeepBalanceMonitor.connect(owner).topUp([], [], []), - ).to.be.revertedWith('Pausable: paused') - }) - - it('tops up the upkeeps by the amounts provided', async () => { - const initialBalance = await linkToken.balanceOf(registry.address) - const tx = await upkeepBalanceMonitor - .connect(owner) - .topUp([1, 7], [registry.address, registry.address], [100, 50]) - const finalBalance = await linkToken.balanceOf(registry.address) - expect(finalBalance).to.equal(initialBalance.add(150)) - await expect(tx) - .to.emit(upkeepBalanceMonitor, 'TopUpSucceeded') - .withArgs(1, 100) - await expect(tx) - .to.emit(upkeepBalanceMonitor, 'TopUpSucceeded') - .withArgs(7, 50) - }) - - it('does not abort if one top-up fails', async () => { - const initialBalance = await linkToken.balanceOf(registry.address) - const tx = await 
upkeepBalanceMonitor - .connect(owner) - .topUp( - [1, 7, 100], - [registry.address, registry.address, registry.address], - [100, 50, 100], - ) - const finalBalance = await linkToken.balanceOf(registry.address) - expect(finalBalance).to.equal(initialBalance.add(150)) - await expect(tx) - .to.emit(upkeepBalanceMonitor, 'TopUpSucceeded') - .withArgs(1, 100) - await expect(tx) - .to.emit(upkeepBalanceMonitor, 'TopUpSucceeded') - .withArgs(7, 50) - await expect(tx) - .to.emit(upkeepBalanceMonitor, 'TopUpFailed') - .withArgs(100) - }) - }) - - describe('checkUpkeep() / performUpkeep()', () => { - it('works round-trip', async () => { - await registry.mock.getBalance.withArgs(1).returns(100) // needs 200 - await registry.mock.getBalance.withArgs(7).returns(0) // needs 300 - await registry.mock.onTokenTransfer - .withArgs( - upkeepBalanceMonitor.address, - 200, - ethers.utils.defaultAbiCoder.encode(['uint256'], [1]), - ) - .returns() - await registry.mock.onTokenTransfer - .withArgs( - upkeepBalanceMonitor.address, - 300, - ethers.utils.defaultAbiCoder.encode(['uint256'], [7]), - ) - .returns() - const [upkeepNeeded, performData] = - await upkeepBalanceMonitor.checkUpkeep('0x') - expect(upkeepNeeded).to.be.true - const initialBalance = await linkToken.balanceOf(registry.address) - await upkeepBalanceMonitor.connect(owner).performUpkeep(performData) - const finalBalance = await linkToken.balanceOf(registry.address) - expect(finalBalance).to.equal(initialBalance.add(500)) - }) - }) -}) diff --git a/contracts/test/v0.8/automation/UpkeepTranscoder3_0.test.ts b/contracts/test/v0.8/automation/UpkeepTranscoder3_0.test.ts deleted file mode 100644 index 7fd811d8226..00000000000 --- a/contracts/test/v0.8/automation/UpkeepTranscoder3_0.test.ts +++ /dev/null @@ -1,576 +0,0 @@ -import { ethers } from 'hardhat' -import { assert, expect } from 'chai' -import { UpkeepTranscoder30__factory as UpkeepTranscoderFactory } from '../../../typechain/factories/UpkeepTranscoder30__factory' -import { UpkeepTranscoder30 as UpkeepTranscoder } from '../../../typechain/UpkeepTranscoder30' -import { KeeperRegistry2_0__factory as KeeperRegistry2_0Factory } from '../../../typechain/factories/KeeperRegistry2_0__factory' -import { LinkToken__factory as LinkTokenFactory } from '../../../typechain/factories/LinkToken__factory' -import { MockV3Aggregator__factory as MockV3AggregatorFactory } from '../../../typechain/factories/MockV3Aggregator__factory' -import { UpkeepMock__factory as UpkeepMockFactory } from '../../../typechain/factories/UpkeepMock__factory' -import { evmRevert } from '../../test-helpers/matchers' -import { BigNumber, Signer } from 'ethers' -import { getUsers, Personas } from '../../test-helpers/setup' -import { KeeperRegistryLogic2_0__factory as KeeperRegistryLogic20Factory } from '../../../typechain/factories/KeeperRegistryLogic2_0__factory' -import { KeeperRegistry1_3__factory as KeeperRegistry1_3Factory } from '../../../typechain/factories/KeeperRegistry1_3__factory' -import { KeeperRegistryLogic1_3__factory as KeeperRegistryLogicFactory } from '../../../typechain/factories/KeeperRegistryLogic1_3__factory' -import { toWei } from '../../test-helpers/helpers' -import { LinkToken } from '../../../typechain' - -let upkeepMockFactory: UpkeepMockFactory -let upkeepTranscoderFactory: UpkeepTranscoderFactory -let transcoder: UpkeepTranscoder -let linkTokenFactory: LinkTokenFactory -let mockV3AggregatorFactory: MockV3AggregatorFactory -let keeperRegistryFactory20: KeeperRegistry2_0Factory -let keeperRegistryFactory13: 
KeeperRegistry1_3Factory -let keeperRegistryLogicFactory20: KeeperRegistryLogic20Factory -let keeperRegistryLogicFactory13: KeeperRegistryLogicFactory -let personas: Personas -let owner: Signer -let upkeepsV1: any[] -let upkeepsV2: any[] -let upkeepsV3: any[] -let admins: string[] -let admin0: Signer -let admin1: Signer -const executeGas = BigNumber.from('100000') -const paymentPremiumPPB = BigNumber.from('250000000') -const flatFeeMicroLink = BigNumber.from(0) -const blockCountPerTurn = BigNumber.from(3) -const randomBytes = '0x1234abcd' -const stalenessSeconds = BigNumber.from(43820) -const gasCeilingMultiplier = BigNumber.from(1) -const checkGasLimit = BigNumber.from(20000000) -const fallbackGasPrice = BigNumber.from(200) -const fallbackLinkPrice = BigNumber.from(200000000) -const maxPerformGas = BigNumber.from(5000000) -const minUpkeepSpend = BigNumber.from(0) -const maxCheckDataSize = BigNumber.from(1000) -const maxPerformDataSize = BigNumber.from(1000) -const mode = BigNumber.from(0) -const linkEth = BigNumber.from(300000000) -const gasWei = BigNumber.from(100) -const registryGasOverhead = BigNumber.from('80000') -const balance = 50000000000000 -const amountSpent = 200000000000000 -const target0 = '0xffffffffffffffffffffffffffffffffffffffff' -const target1 = '0xfffffffffffffffffffffffffffffffffffffffe' -const lastKeeper0 = '0x233a95ccebf3c9f934482c637c08b4015cdd6ddd' -const lastKeeper1 = '0x233a95ccebf3c9f934482c637c08b4015cdd6ddc' -enum UpkeepFormat { - V1, - V2, - V3, - V4, -} -const idx = [123, 124] - -async function getUpkeepID(tx: any) { - const receipt = await tx.wait() - return receipt.events[0].args.id -} - -const encodeConfig = (config: any) => { - return ethers.utils.defaultAbiCoder.encode( - [ - 'tuple(uint32 paymentPremiumPPB,uint32 flatFeeMicroLink,uint32 checkGasLimit,uint24 stalenessSeconds\ - ,uint16 gasCeilingMultiplier,uint96 minUpkeepSpend,uint32 maxPerformGas,uint32 maxCheckDataSize,\ - uint32 maxPerformDataSize,uint256 fallbackGasPrice,uint256 fallbackLinkPrice,address transcoder,\ - address registrar)', - ], - [config], - ) -} - -const encodeUpkeepV1 = (ids: number[], upkeeps: any[], checkDatas: any[]) => { - return ethers.utils.defaultAbiCoder.encode( - [ - 'uint256[]', - 'tuple(uint96,address,uint32,uint64,address,uint96,address)[]', - 'bytes[]', - ], - [ids, upkeeps, checkDatas], - ) -} - -const encodeUpkeepV2 = (ids: number[], upkeeps: any[], checkDatas: any[]) => { - return ethers.utils.defaultAbiCoder.encode( - [ - 'uint256[]', - 'tuple(uint96,address,uint96,address,uint32,uint32,address,bool)[]', - 'bytes[]', - ], - [ids, upkeeps, checkDatas], - ) -} - -const encodeUpkeepV3 = ( - ids: number[], - upkeeps: any[], - checkDatas: any[], - admins: string[], -) => { - return ethers.utils.defaultAbiCoder.encode( - [ - 'uint256[]', - 'tuple(uint32,uint32,bool,address,uint96,uint96,uint32)[]', - 'bytes[]', - 'address[]', - ], - [ids, upkeeps, checkDatas, admins], - ) -} - -before(async () => { - // @ts-ignore bug in autogen file - upkeepTranscoderFactory = await ethers.getContractFactory( - 'UpkeepTranscoder3_0', - ) - personas = (await getUsers()).personas - - linkTokenFactory = await ethers.getContractFactory( - 'src/v0.8/shared/test/helpers/LinkTokenTestHelper.sol:LinkTokenTestHelper', - ) - // need full path because there are two contracts with name MockV3Aggregator - mockV3AggregatorFactory = (await ethers.getContractFactory( - 'src/v0.8/shared/mocks/MockV3Aggregator.sol:MockV3Aggregator', - )) as unknown as MockV3AggregatorFactory - - upkeepMockFactory = 
await ethers.getContractFactory('UpkeepMock') - - owner = personas.Norbert - admin0 = personas.Neil - admin1 = personas.Nick - admins = [ - (await admin0.getAddress()).toLowerCase(), - (await admin1.getAddress()).toLowerCase(), - ] -}) - -async function deployLinkToken() { - return await linkTokenFactory.connect(owner).deploy() -} - -async function deployFeeds() { - return [ - await mockV3AggregatorFactory.connect(owner).deploy(0, gasWei), - await mockV3AggregatorFactory.connect(owner).deploy(9, linkEth), - ] -} - -async function deployLegacyRegistry1_2( - linkToken: LinkToken, - gasPriceFeed: any, - linkEthFeed: any, -) { - const mock = await upkeepMockFactory.deploy() - // @ts-ignore bug in autogen file - const keeperRegistryFactory = - await ethers.getContractFactory('KeeperRegistry1_2') - transcoder = await upkeepTranscoderFactory.connect(owner).deploy() - const legacyRegistry = await keeperRegistryFactory - .connect(owner) - .deploy(linkToken.address, linkEthFeed.address, gasPriceFeed.address, { - paymentPremiumPPB, - flatFeeMicroLink, - blockCountPerTurn, - checkGasLimit, - stalenessSeconds, - gasCeilingMultiplier, - minUpkeepSpend, - maxPerformGas, - fallbackGasPrice, - fallbackLinkPrice, - transcoder: transcoder.address, - registrar: ethers.constants.AddressZero, - }) - const tx = await legacyRegistry - .connect(owner) - .registerUpkeep( - mock.address, - executeGas, - await admin0.getAddress(), - randomBytes, - ) - const id = await getUpkeepID(tx) - return [id, legacyRegistry] -} - -async function deployLegacyRegistry1_3( - linkToken: LinkToken, - gasPriceFeed: any, - linkEthFeed: any, -) { - const mock = await upkeepMockFactory.deploy() - // @ts-ignore bug in autogen file - keeperRegistryFactory13 = await ethers.getContractFactory('KeeperRegistry1_3') - // @ts-ignore bug in autogen file - keeperRegistryLogicFactory13 = await ethers.getContractFactory( - 'KeeperRegistryLogic1_3', - ) - - const registryLogic13 = await keeperRegistryLogicFactory13 - .connect(owner) - .deploy( - 0, - registryGasOverhead, - linkToken.address, - linkEthFeed.address, - gasPriceFeed.address, - ) - - const config = { - paymentPremiumPPB, - flatFeeMicroLink, - blockCountPerTurn, - checkGasLimit, - stalenessSeconds, - gasCeilingMultiplier, - minUpkeepSpend, - maxPerformGas, - fallbackGasPrice, - fallbackLinkPrice, - transcoder: transcoder.address, - registrar: ethers.constants.AddressZero, - } - const Registry1_3 = await keeperRegistryFactory13 - .connect(owner) - .deploy(registryLogic13.address, config) - - const tx = await Registry1_3.connect(owner).registerUpkeep( - mock.address, - executeGas, - await admin0.getAddress(), - randomBytes, - ) - const id = await getUpkeepID(tx) - - return [id, Registry1_3] -} - -async function deployRegistry2_0( - linkToken: LinkToken, - gasPriceFeed: any, - linkEthFeed: any, -) { - // @ts-ignore bug in autogen file - keeperRegistryFactory20 = await ethers.getContractFactory('KeeperRegistry2_0') - // @ts-ignore bug in autogen file - keeperRegistryLogicFactory20 = await ethers.getContractFactory( - 'KeeperRegistryLogic2_0', - ) - - const config = { - paymentPremiumPPB, - flatFeeMicroLink, - checkGasLimit, - stalenessSeconds, - gasCeilingMultiplier, - minUpkeepSpend, - maxCheckDataSize, - maxPerformDataSize, - maxPerformGas, - fallbackGasPrice, - fallbackLinkPrice, - transcoder: transcoder.address, - registrar: ethers.constants.AddressZero, - } - - const registryLogic = await keeperRegistryLogicFactory20 - .connect(owner) - .deploy(mode, linkToken.address, 
linkEthFeed.address, gasPriceFeed.address) - - const Registry2_0 = await keeperRegistryFactory20 - .connect(owner) - .deploy(registryLogic.address) - - // deploys a registry, setups of initial configuration, registers an upkeep - const keeper1 = personas.Carol - const keeper2 = personas.Eddy - const keeper3 = personas.Nancy - const keeper4 = personas.Norbert - const keeper5 = personas.Nick - const payee1 = personas.Nelly - const payee2 = personas.Norbert - const payee3 = personas.Nick - const payee4 = personas.Eddy - const payee5 = personas.Carol - // signers - const signer1 = new ethers.Wallet( - '0x7777777000000000000000000000000000000000000000000000000000000001', - ) - const signer2 = new ethers.Wallet( - '0x7777777000000000000000000000000000000000000000000000000000000002', - ) - const signer3 = new ethers.Wallet( - '0x7777777000000000000000000000000000000000000000000000000000000003', - ) - const signer4 = new ethers.Wallet( - '0x7777777000000000000000000000000000000000000000000000000000000004', - ) - const signer5 = new ethers.Wallet( - '0x7777777000000000000000000000000000000000000000000000000000000005', - ) - - const keeperAddresses = [ - await keeper1.getAddress(), - await keeper2.getAddress(), - await keeper3.getAddress(), - await keeper4.getAddress(), - await keeper5.getAddress(), - ] - const payees = [ - await payee1.getAddress(), - await payee2.getAddress(), - await payee3.getAddress(), - await payee4.getAddress(), - await payee5.getAddress(), - ] - const signers = [signer1, signer2, signer3, signer4, signer5] - - const signerAddresses = [] - for (const signer of signers) { - signerAddresses.push(await signer.getAddress()) - } - - const f = 1 - const offchainVersion = 1 - const offchainBytes = '0x' - - await Registry2_0.connect(owner).setConfig( - signerAddresses, - keeperAddresses, - f, - encodeConfig(config), - offchainVersion, - offchainBytes, - ) - await Registry2_0.connect(owner).setPayees(payees) - return Registry2_0 -} - -describe('UpkeepTranscoder3_0', () => { - beforeEach(async () => { - transcoder = await upkeepTranscoderFactory.connect(owner).deploy() - }) - - describe('#typeAndVersion', () => { - it('uses the correct type and version', async () => { - const typeAndVersion = await transcoder.typeAndVersion() - assert.equal(typeAndVersion, 'UpkeepTranscoder 3.0.0') - }) - }) - - describe('#transcodeUpkeeps', () => { - const encodedData = '0xabcd' - - it('reverts if the from type is not V1 or V2', async () => { - await evmRevert( - transcoder.transcodeUpkeeps( - UpkeepFormat.V3, - UpkeepFormat.V1, - encodedData, - ), - ) - await evmRevert( - transcoder.transcodeUpkeeps( - UpkeepFormat.V4, - UpkeepFormat.V1, - encodedData, - ), - ) - }) - - context('when from and to versions are correct', () => { - upkeepsV3 = [ - [executeGas, 2 ** 32 - 1, false, target0, amountSpent, balance, 0], - [executeGas, 2 ** 32 - 1, false, target1, amountSpent, balance, 0], - ] - - it('transcodes V1 upkeeps to V3 properly, regardless of toVersion value', async () => { - upkeepsV1 = [ - [ - balance, - lastKeeper0, - executeGas, - 2 ** 32, - target0, - amountSpent, - await admin0.getAddress(), - ], - [ - balance, - lastKeeper1, - executeGas, - 2 ** 32, - target1, - amountSpent, - await admin1.getAddress(), - ], - ] - - const data = await transcoder.transcodeUpkeeps( - UpkeepFormat.V1, - UpkeepFormat.V1, - encodeUpkeepV1(idx, upkeepsV1, ['0xabcd', '0xffff']), - ) - assert.equal( - encodeUpkeepV3(idx, upkeepsV3, ['0xabcd', '0xffff'], admins), - data, - ) - }) - - it('transcodes V2 upkeeps to V3 
properly, regardless of toVersion value', async () => { - upkeepsV2 = [ - [ - balance, - lastKeeper0, - amountSpent, - await admin0.getAddress(), - executeGas, - 2 ** 32 - 1, - target0, - false, - ], - [ - balance, - lastKeeper1, - amountSpent, - await admin1.getAddress(), - executeGas, - 2 ** 32 - 1, - target1, - false, - ], - ] - - const data = await transcoder.transcodeUpkeeps( - UpkeepFormat.V2, - UpkeepFormat.V2, - encodeUpkeepV2(idx, upkeepsV2, ['0xabcd', '0xffff']), - ) - assert.equal( - encodeUpkeepV3(idx, upkeepsV3, ['0xabcd', '0xffff'], admins), - data, - ) - }) - - it('migrates upkeeps from 1.2 registry to 2.0', async () => { - const linkToken = await deployLinkToken() - const [gasPriceFeed, linkEthFeed] = await deployFeeds() - const [id, legacyRegistry] = await deployLegacyRegistry1_2( - linkToken, - gasPriceFeed, - linkEthFeed, - ) - const Registry2_0 = await deployRegistry2_0( - linkToken, - gasPriceFeed, - linkEthFeed, - ) - - await linkToken - .connect(owner) - .approve(legacyRegistry.address, toWei('1000')) - await legacyRegistry.connect(owner).addFunds(id, toWei('1000')) - - // set outgoing permission to registry 2_0 and incoming permission for registry 1_2 - await legacyRegistry.setPeerRegistryMigrationPermission( - Registry2_0.address, - 1, - ) - await Registry2_0.setPeerRegistryMigrationPermission( - legacyRegistry.address, - 2, - ) - - expect((await legacyRegistry.getUpkeep(id)).balance).to.equal( - toWei('1000'), - ) - expect((await legacyRegistry.getUpkeep(id)).checkData).to.equal( - randomBytes, - ) - expect((await legacyRegistry.getState()).state.numUpkeeps).to.equal(1) - - await legacyRegistry - .connect(admin0) - .migrateUpkeeps([id], Registry2_0.address) - - expect((await legacyRegistry.getState()).state.numUpkeeps).to.equal(0) - expect((await Registry2_0.getState()).state.numUpkeeps).to.equal(1) - expect((await legacyRegistry.getUpkeep(id)).balance).to.equal(0) - expect((await legacyRegistry.getUpkeep(id)).checkData).to.equal('0x') - expect((await Registry2_0.getUpkeep(id)).balance).to.equal( - toWei('1000'), - ) - expect( - (await Registry2_0.getState()).state.expectedLinkBalance, - ).to.equal(toWei('1000')) - expect(await linkToken.balanceOf(Registry2_0.address)).to.equal( - toWei('1000'), - ) - expect((await Registry2_0.getUpkeep(id)).checkData).to.equal( - randomBytes, - ) - }) - - it('migrates upkeeps from 1.3 registry to 2.0', async () => { - const linkToken = await deployLinkToken() - const [gasPriceFeed, linkEthFeed] = await deployFeeds() - const [id, legacyRegistry] = await deployLegacyRegistry1_3( - linkToken, - gasPriceFeed, - linkEthFeed, - ) - const Registry2_0 = await deployRegistry2_0( - linkToken, - gasPriceFeed, - linkEthFeed, - ) - - await linkToken - .connect(owner) - .approve(legacyRegistry.address, toWei('1000')) - await legacyRegistry.connect(owner).addFunds(id, toWei('1000')) - - // set outgoing permission to registry 2_0 and incoming permission for registry 1_3 - await legacyRegistry.setPeerRegistryMigrationPermission( - Registry2_0.address, - 1, - ) - await Registry2_0.setPeerRegistryMigrationPermission( - legacyRegistry.address, - 2, - ) - - expect((await legacyRegistry.getUpkeep(id)).balance).to.equal( - toWei('1000'), - ) - expect((await legacyRegistry.getUpkeep(id)).checkData).to.equal( - randomBytes, - ) - expect((await legacyRegistry.getState()).state.numUpkeeps).to.equal(1) - - await legacyRegistry - .connect(admin0) - .migrateUpkeeps([id], Registry2_0.address) - - expect((await 
legacyRegistry.getState()).state.numUpkeeps).to.equal(0) - expect((await Registry2_0.getState()).state.numUpkeeps).to.equal(1) - expect((await legacyRegistry.getUpkeep(id)).balance).to.equal(0) - expect((await legacyRegistry.getUpkeep(id)).checkData).to.equal('0x') - expect((await Registry2_0.getUpkeep(id)).balance).to.equal( - toWei('1000'), - ) - expect( - (await Registry2_0.getState()).state.expectedLinkBalance, - ).to.equal(toWei('1000')) - expect(await linkToken.balanceOf(Registry2_0.address)).to.equal( - toWei('1000'), - ) - expect((await Registry2_0.getUpkeep(id)).checkData).to.equal( - randomBytes, - ) - }) - }) - }) -}) diff --git a/contracts/test/v0.8/automation/UpkeepTranscoder4_0.test.ts b/contracts/test/v0.8/automation/UpkeepTranscoder4_0.test.ts deleted file mode 100644 index b49dfb1d5b4..00000000000 --- a/contracts/test/v0.8/automation/UpkeepTranscoder4_0.test.ts +++ /dev/null @@ -1,654 +0,0 @@ -import { ethers } from 'hardhat' -import { assert, expect } from 'chai' -import { UpkeepTranscoder4_0 as UpkeepTranscoder } from '../../../typechain/UpkeepTranscoder4_0' -import { KeeperRegistry2_0__factory as KeeperRegistry2_0Factory } from '../../../typechain/factories/KeeperRegistry2_0__factory' -import { LinkToken__factory as LinkTokenFactory } from '../../../typechain/factories/LinkToken__factory' -import { MockV3Aggregator__factory as MockV3AggregatorFactory } from '../../../typechain/factories/MockV3Aggregator__factory' -import { evmRevert } from '../../test-helpers/matchers' -import { BigNumber, Signer } from 'ethers' -import { getUsers, Personas } from '../../test-helpers/setup' -import { KeeperRegistryLogic2_0__factory as KeeperRegistryLogic20Factory } from '../../../typechain/factories/KeeperRegistryLogic2_0__factory' -import { KeeperRegistry1_3__factory as KeeperRegistry1_3Factory } from '../../../typechain/factories/KeeperRegistry1_3__factory' -import { KeeperRegistryLogic1_3__factory as KeeperRegistryLogicFactory } from '../../../typechain/factories/KeeperRegistryLogic1_3__factory' -import { UpkeepTranscoder4_0__factory as UpkeepTranscoderFactory } from '../../../typechain/factories/UpkeepTranscoder4_0__factory' -import { toWei } from '../../test-helpers/helpers' -import { loadFixture } from '@nomicfoundation/hardhat-network-helpers' -import { - IKeeperRegistryMaster, - KeeperRegistry1_2, - KeeperRegistry1_3, - KeeperRegistry2_0, - LinkToken, - MockV3Aggregator, - UpkeepMock, -} from '../../../typechain' -import { deployRegistry21 } from './helpers' - -////////////////////////////////////////////////////////////////////////////////////////////////// -////////////////////////////////////////////////////////////////////////////////////////////////// - -/*********************************** TRANSCODER v4.0 IS FROZEN ************************************/ - -// We are leaving the original tests enabled, however as automation v2.1 is still actively being deployed - -describe('UpkeepTranscoder v4.0 - Frozen [ @skip-coverage ]', () => { - it('has not changed', () => { - assert.equal( - ethers.utils.id(UpkeepTranscoderFactory.bytecode), - '0xf22c4701b0088e6e69c389a34a22041a69f00890a89246e3c2a6d38172222dae', - 'UpkeepTranscoder bytecode has changed', - ) - }) -}) - -////////////////////////////////////////////////////////////////////////////////////////////////// -////////////////////////////////////////////////////////////////////////////////////////////////// - -let transcoder: UpkeepTranscoder -let linkTokenFactory: LinkTokenFactory -let keeperRegistryFactory20: 
KeeperRegistry2_0Factory -let keeperRegistryFactory13: KeeperRegistry1_3Factory -let keeperRegistryLogicFactory20: KeeperRegistryLogic20Factory -let keeperRegistryLogicFactory13: KeeperRegistryLogicFactory -let linkToken: LinkToken -let registry12: KeeperRegistry1_2 -let registry13: KeeperRegistry1_3 -let registry20: KeeperRegistry2_0 -let registry21: IKeeperRegistryMaster -let gasPriceFeed: MockV3Aggregator -let linkEthFeed: MockV3Aggregator -let mock: UpkeepMock -let personas: Personas -let owner: Signer -let upkeepsV12: any[] -let upkeepsV13: any[] -let upkeepsV21: any[] -let admins: string[] -let admin0: Signer -let admin1: Signer -let id12: BigNumber -let id13: BigNumber -let id20: BigNumber -const executeGas = BigNumber.from('100000') -const paymentPremiumPPB = BigNumber.from('250000000') -const flatFeeMicroLink = BigNumber.from(0) -const blockCountPerTurn = BigNumber.from(3) -const randomBytes = '0x1234abcd' -const stalenessSeconds = BigNumber.from(43820) -const gasCeilingMultiplier = BigNumber.from(1) -const checkGasLimit = BigNumber.from(20000000) -const fallbackGasPrice = BigNumber.from(200) -const fallbackLinkPrice = BigNumber.from(200000000) -const maxPerformGas = BigNumber.from(5000000) -const minUpkeepSpend = BigNumber.from(0) -const maxCheckDataSize = BigNumber.from(1000) -const maxPerformDataSize = BigNumber.from(1000) -const mode = BigNumber.from(0) -const linkEth = BigNumber.from(300000000) -const gasWei = BigNumber.from(100) -const registryGasOverhead = BigNumber.from('80000') -const balance = 50000000000000 -const amountSpent = 200000000000000 -const { AddressZero } = ethers.constants -const target0 = '0xffffffffffffffffffffffffffffffffffffffff' -const target1 = '0xfffffffffffffffffffffffffffffffffffffffe' -const lastKeeper0 = '0x233a95ccebf3c9f934482c637c08b4015cdd6ddd' -const lastKeeper1 = '0x233a95ccebf3c9f934482c637c08b4015cdd6ddc' - -const f = 1 -const offchainVersion = 1 -const offchainBytes = '0x' -let keeperAddresses: string[] -let signerAddresses: string[] -let payees: string[] - -enum UpkeepFormat { - V12, - V13, - V20, - V21, - V30, // Does not exist -} -const idx = [123, 124] - -async function getUpkeepID(tx: any): Promise { - const receipt = await tx.wait() - return receipt.events[0].args.id -} - -const encodeConfig20 = (config: any) => { - return ethers.utils.defaultAbiCoder.encode( - [ - 'tuple(uint32 paymentPremiumPPB,uint32 flatFeeMicroLink,uint32 checkGasLimit,uint24 stalenessSeconds\ - ,uint16 gasCeilingMultiplier,uint96 minUpkeepSpend,uint32 maxPerformGas,uint32 maxCheckDataSize,\ - uint32 maxPerformDataSize,uint256 fallbackGasPrice,uint256 fallbackLinkPrice,address transcoder,\ - address registrar)', - ], - [config], - ) -} - -const encodeUpkeepV12 = (ids: number[], upkeeps: any[], checkDatas: any[]) => { - return ethers.utils.defaultAbiCoder.encode( - [ - 'uint256[]', - 'tuple(uint96,address,uint32,uint64,address,uint96,address)[]', - 'bytes[]', - ], - [ids, upkeeps, checkDatas], - ) -} - -async function deployRegistry1_2(): Promise<[BigNumber, KeeperRegistry1_2]> { - const keeperRegistryFactory = - await ethers.getContractFactory('KeeperRegistry1_2') - const registry12 = await keeperRegistryFactory - .connect(owner) - .deploy(linkToken.address, linkEthFeed.address, gasPriceFeed.address, { - paymentPremiumPPB, - flatFeeMicroLink, - blockCountPerTurn, - checkGasLimit, - stalenessSeconds, - gasCeilingMultiplier, - minUpkeepSpend, - maxPerformGas, - fallbackGasPrice, - fallbackLinkPrice, - transcoder: transcoder.address, - registrar: 
ethers.constants.AddressZero, - }) - const tx = await registry12 - .connect(owner) - .registerUpkeep( - mock.address, - executeGas, - await admin0.getAddress(), - randomBytes, - ) - const id = await getUpkeepID(tx) - return [id, registry12] -} - -async function deployRegistry1_3(): Promise<[BigNumber, KeeperRegistry1_3]> { - keeperRegistryFactory13 = await ethers.getContractFactory('KeeperRegistry1_3') - keeperRegistryLogicFactory13 = await ethers.getContractFactory( - 'KeeperRegistryLogic1_3', - ) - - const registryLogic13 = await keeperRegistryLogicFactory13 - .connect(owner) - .deploy( - 0, - registryGasOverhead, - linkToken.address, - linkEthFeed.address, - gasPriceFeed.address, - ) - - const config = { - paymentPremiumPPB, - flatFeeMicroLink, - blockCountPerTurn, - checkGasLimit, - stalenessSeconds, - gasCeilingMultiplier, - minUpkeepSpend, - maxPerformGas, - fallbackGasPrice, - fallbackLinkPrice, - transcoder: transcoder.address, - registrar: ethers.constants.AddressZero, - } - const registry13 = await keeperRegistryFactory13 - .connect(owner) - .deploy(registryLogic13.address, config) - - const tx = await registry13 - .connect(owner) - .registerUpkeep( - mock.address, - executeGas, - await admin0.getAddress(), - randomBytes, - ) - const id = await getUpkeepID(tx) - - return [id, registry13] -} - -async function deployRegistry2_0(): Promise<[BigNumber, KeeperRegistry2_0]> { - keeperRegistryFactory20 = await ethers.getContractFactory('KeeperRegistry2_0') - keeperRegistryLogicFactory20 = await ethers.getContractFactory( - 'KeeperRegistryLogic2_0', - ) - - const config = { - paymentPremiumPPB, - flatFeeMicroLink, - checkGasLimit, - stalenessSeconds, - gasCeilingMultiplier, - minUpkeepSpend, - maxCheckDataSize, - maxPerformDataSize, - maxPerformGas, - fallbackGasPrice, - fallbackLinkPrice, - transcoder: transcoder.address, - registrar: ethers.constants.AddressZero, - } - - const registryLogic = await keeperRegistryLogicFactory20 - .connect(owner) - .deploy(mode, linkToken.address, linkEthFeed.address, gasPriceFeed.address) - - const registry20 = await keeperRegistryFactory20 - .connect(owner) - .deploy(registryLogic.address) - - await registry20 - .connect(owner) - .setConfig( - signerAddresses, - keeperAddresses, - f, - encodeConfig20(config), - offchainVersion, - offchainBytes, - ) - await registry20.connect(owner).setPayees(payees) - - const tx = await registry20 - .connect(owner) - .registerUpkeep( - mock.address, - executeGas, - await admin0.getAddress(), - randomBytes, - randomBytes, - ) - const id = await getUpkeepID(tx) - - return [id, registry20] -} - -async function deployRegistry2_1() { - const registry = await deployRegistry21( - owner, - mode, - linkToken.address, - linkEthFeed.address, - gasPriceFeed.address, - ) - - const onchainConfig = { - paymentPremiumPPB, - flatFeeMicroLink, - checkGasLimit, - stalenessSeconds, - gasCeilingMultiplier, - minUpkeepSpend, - maxCheckDataSize, - maxPerformDataSize, - maxRevertDataSize: 1000, - maxPerformGas, - fallbackGasPrice, - fallbackLinkPrice, - transcoder: ethers.constants.AddressZero, - registrars: [], - upkeepPrivilegeManager: await owner.getAddress(), - } - - await registry - .connect(owner) - .setConfigTypeSafe( - signerAddresses, - keeperAddresses, - f, - onchainConfig, - offchainVersion, - offchainBytes, - ) - - return registry -} - -const setup = async () => { - personas = (await getUsers()).personas - owner = personas.Norbert - admin0 = personas.Neil - admin1 = personas.Nick - admins = [ - (await 
admin0.getAddress()).toLowerCase(), - (await admin1.getAddress()).toLowerCase(), - ] - - const upkeepTranscoderFactory = await ethers.getContractFactory( - 'UpkeepTranscoder4_0', - ) - transcoder = await upkeepTranscoderFactory.connect(owner).deploy() - - linkTokenFactory = await ethers.getContractFactory( - 'src/v0.8/shared/test/helpers/LinkTokenTestHelper.sol:LinkTokenTestHelper', - ) - linkToken = await linkTokenFactory.connect(owner).deploy() - // need full path because there are two contracts with name MockV3Aggregator - const mockV3AggregatorFactory = (await ethers.getContractFactory( - 'src/v0.8/shared/mocks/MockV3Aggregator.sol:MockV3Aggregator', - )) as unknown as MockV3AggregatorFactory - - gasPriceFeed = await mockV3AggregatorFactory.connect(owner).deploy(0, gasWei) - linkEthFeed = await mockV3AggregatorFactory.connect(owner).deploy(9, linkEth) - - const upkeepMockFactory = await ethers.getContractFactory('UpkeepMock') - mock = await upkeepMockFactory.deploy() - - const keeper1 = personas.Carol - const keeper2 = personas.Eddy - const keeper3 = personas.Nancy - const keeper4 = personas.Norbert - const keeper5 = personas.Nick - const payee1 = personas.Nelly - const payee2 = personas.Norbert - const payee3 = personas.Nick - const payee4 = personas.Eddy - const payee5 = personas.Carol - // signers - const signer1 = new ethers.Wallet( - '0x7777777000000000000000000000000000000000000000000000000000000001', - ) - const signer2 = new ethers.Wallet( - '0x7777777000000000000000000000000000000000000000000000000000000002', - ) - const signer3 = new ethers.Wallet( - '0x7777777000000000000000000000000000000000000000000000000000000003', - ) - const signer4 = new ethers.Wallet( - '0x7777777000000000000000000000000000000000000000000000000000000004', - ) - const signer5 = new ethers.Wallet( - '0x7777777000000000000000000000000000000000000000000000000000000005', - ) - - keeperAddresses = [ - await keeper1.getAddress(), - await keeper2.getAddress(), - await keeper3.getAddress(), - await keeper4.getAddress(), - await keeper5.getAddress(), - ] - - payees = [ - await payee1.getAddress(), - await payee2.getAddress(), - await payee3.getAddress(), - await payee4.getAddress(), - await payee5.getAddress(), - ] - const signers = [signer1, signer2, signer3, signer4, signer5] - - signerAddresses = signers.map((signer) => signer.address) - ;[id12, registry12] = await deployRegistry1_2() - ;[id13, registry13] = await deployRegistry1_3() - ;[id20, registry20] = await deployRegistry2_0() - registry21 = await deployRegistry2_1() - - upkeepsV12 = [ - [ - balance, - lastKeeper0, - executeGas, - 2 ** 32, - target0, - amountSpent, - await admin0.getAddress(), - ], - [ - balance, - lastKeeper1, - executeGas, - 2 ** 32, - target1, - amountSpent, - await admin1.getAddress(), - ], - ] - - upkeepsV13 = [ - [ - balance, - lastKeeper0, - amountSpent, - await admin0.getAddress(), - executeGas, - 2 ** 32 - 1, - target0, - false, - ], - [ - balance, - lastKeeper1, - amountSpent, - await admin1.getAddress(), - executeGas, - 2 ** 32 - 1, - target1, - false, - ], - ] - - upkeepsV21 = [ - [ - false, - executeGas, - 2 ** 32 - 1, - AddressZero, // forwarder will always be zero - amountSpent, - balance, - 0, - target0, - ], - [ - false, - executeGas, - 2 ** 32 - 1, - AddressZero, // forwarder will always be zero - amountSpent, - balance, - 0, - target1, - ], - ] -} - -describe('UpkeepTranscoder4_0', () => { - beforeEach(async () => { - await loadFixture(setup) - }) - - describe('#typeAndVersion', () => { - it('uses the correct type 
and version', async () => { - const typeAndVersion = await transcoder.typeAndVersion() - assert.equal(typeAndVersion, 'UpkeepTranscoder 4.0.0') - }) - }) - - describe('#transcodeUpkeeps', () => { - const encodedData = '0xabcd' - - it('reverts if the from type is not v1.2, v1.3, v2.0, or v2.1', async () => { - await evmRevert( - transcoder.transcodeUpkeeps( - UpkeepFormat.V30, - UpkeepFormat.V12, - encodedData, - ), - ) - }) - - context('when from version is correct', () => { - // note this is a bugfix - the "to" version should be accounted for in - // future versions of the transcoder - it('transcodes to v2.1, regardless of toVersion value', async () => { - const data1 = await transcoder.transcodeUpkeeps( - UpkeepFormat.V12, - UpkeepFormat.V12, - encodeUpkeepV12(idx, upkeepsV12, ['0xabcd', '0xffff']), - ) - const data2 = await transcoder.transcodeUpkeeps( - UpkeepFormat.V12, - UpkeepFormat.V13, - encodeUpkeepV12(idx, upkeepsV12, ['0xabcd', '0xffff']), - ) - const data3 = await transcoder.transcodeUpkeeps( - UpkeepFormat.V12, - 100, - encodeUpkeepV12(idx, upkeepsV12, ['0xabcd', '0xffff']), - ) - assert.equal(data1, data2) - assert.equal(data1, data3) - }) - - it('migrates upkeeps from 1.2 registry to 2.1', async () => { - await linkToken - .connect(owner) - .approve(registry12.address, toWei('1000')) - await registry12.connect(owner).addFunds(id12, toWei('1000')) - - await registry12.setPeerRegistryMigrationPermission( - registry21.address, - 1, - ) - await registry21.setPeerRegistryMigrationPermission( - registry12.address, - 2, - ) - - expect((await registry12.getUpkeep(id12)).balance).to.equal( - toWei('1000'), - ) - expect((await registry12.getUpkeep(id12)).checkData).to.equal( - randomBytes, - ) - expect((await registry12.getState()).state.numUpkeeps).to.equal(1) - - await registry12 - .connect(admin0) - .migrateUpkeeps([id12], registry21.address) - - expect((await registry12.getState()).state.numUpkeeps).to.equal(0) - expect((await registry21.getState()).state.numUpkeeps).to.equal(1) - expect((await registry12.getUpkeep(id12)).balance).to.equal(0) - expect((await registry12.getUpkeep(id12)).checkData).to.equal('0x') - expect((await registry21.getUpkeep(id12)).balance).to.equal( - toWei('1000'), - ) - expect( - (await registry21.getState()).state.expectedLinkBalance, - ).to.equal(toWei('1000')) - expect(await linkToken.balanceOf(registry21.address)).to.equal( - toWei('1000'), - ) - expect((await registry21.getUpkeep(id12)).checkData).to.equal( - randomBytes, - ) - expect((await registry21.getUpkeep(id12)).offchainConfig).to.equal('0x') - expect(await registry21.getUpkeepTriggerConfig(id12)).to.equal('0x') - }) - - it('migrates upkeeps from 1.3 registry to 2.1', async () => { - await linkToken - .connect(owner) - .approve(registry13.address, toWei('1000')) - await registry13.connect(owner).addFunds(id13, toWei('1000')) - - await registry13.setPeerRegistryMigrationPermission( - registry21.address, - 1, - ) - await registry21.setPeerRegistryMigrationPermission( - registry13.address, - 2, - ) - - expect((await registry13.getUpkeep(id13)).balance).to.equal( - toWei('1000'), - ) - expect((await registry13.getUpkeep(id13)).checkData).to.equal( - randomBytes, - ) - expect((await registry13.getState()).state.numUpkeeps).to.equal(1) - - await registry13 - .connect(admin0) - .migrateUpkeeps([id13], registry21.address) - - expect((await registry13.getState()).state.numUpkeeps).to.equal(0) - expect((await registry21.getState()).state.numUpkeeps).to.equal(1) - expect((await 
registry13.getUpkeep(id13)).balance).to.equal(0) - expect((await registry13.getUpkeep(id13)).checkData).to.equal('0x') - expect((await registry21.getUpkeep(id13)).balance).to.equal( - toWei('1000'), - ) - expect( - (await registry21.getState()).state.expectedLinkBalance, - ).to.equal(toWei('1000')) - expect(await linkToken.balanceOf(registry21.address)).to.equal( - toWei('1000'), - ) - expect((await registry21.getUpkeep(id13)).checkData).to.equal( - randomBytes, - ) - expect((await registry21.getUpkeep(id13)).offchainConfig).to.equal('0x') - expect(await registry21.getUpkeepTriggerConfig(id13)).to.equal('0x') - }) - - it('migrates upkeeps from 2.0 registry to 2.1', async () => { - await linkToken - .connect(owner) - .approve(registry20.address, toWei('1000')) - await registry20.connect(owner).addFunds(id20, toWei('1000')) - - await registry20.setPeerRegistryMigrationPermission( - registry21.address, - 1, - ) - await registry21.setPeerRegistryMigrationPermission( - registry20.address, - 2, - ) - - expect((await registry20.getUpkeep(id20)).balance).to.equal( - toWei('1000'), - ) - expect((await registry20.getUpkeep(id20)).checkData).to.equal( - randomBytes, - ) - expect((await registry20.getState()).state.numUpkeeps).to.equal(1) - - await registry20 - .connect(admin0) - .migrateUpkeeps([id20], registry21.address) - - expect((await registry20.getState()).state.numUpkeeps).to.equal(0) - expect((await registry21.getState()).state.numUpkeeps).to.equal(1) - expect((await registry20.getUpkeep(id20)).balance).to.equal(0) - expect((await registry20.getUpkeep(id20)).checkData).to.equal('0x') - expect((await registry21.getUpkeep(id20)).balance).to.equal( - toWei('1000'), - ) - expect( - (await registry21.getState()).state.expectedLinkBalance, - ).to.equal(toWei('1000')) - expect(await linkToken.balanceOf(registry21.address)).to.equal( - toWei('1000'), - ) - expect((await registry21.getUpkeep(id20)).checkData).to.equal( - randomBytes, - ) - expect(await registry21.getUpkeepTriggerConfig(id20)).to.equal('0x') - }) - }) - }) -}) diff --git a/contracts/test/v0.8/automation/helpers.ts b/contracts/test/v0.8/automation/helpers.ts index 99f2cef9b87..130bdcbfecf 100644 --- a/contracts/test/v0.8/automation/helpers.ts +++ b/contracts/test/v0.8/automation/helpers.ts @@ -1,11 +1,5 @@ import { Signer } from 'ethers' import { ethers } from 'hardhat' -import { KeeperRegistryLogicB2_1__factory as KeeperRegistryLogicBFactory } from '../../../typechain/factories/KeeperRegistryLogicB2_1__factory' -import { IKeeperRegistryMaster as IKeeperRegistry } from '../../../typechain/IKeeperRegistryMaster' -import { IKeeperRegistryMaster__factory as IKeeperRegistryMasterFactory } from '../../../typechain/factories/IKeeperRegistryMaster__factory' -import { AutomationRegistryLogicB2_2__factory as AutomationRegistryLogicBFactory } from '../../../typechain/factories/AutomationRegistryLogicB2_2__factory' -import { IAutomationRegistryMaster as IAutomationRegistry } from '../../../typechain/IAutomationRegistryMaster' -import { IAutomationRegistryMaster__factory as IAutomationRegistryMasterFactory } from '../../../typechain/factories/IAutomationRegistryMaster__factory' import { assert } from 'chai' import { FunctionFragment } from '@ethersproject/abi' import { AutomationRegistryLogicC2_3__factory as AutomationRegistryLogicC2_3Factory } from '../../../typechain/factories/AutomationRegistryLogicC2_3__factory' @@ -13,32 +7,6 @@ import { ZKSyncAutomationRegistryLogicC2_3__factory as ZKSyncAutomationRegistryL import { 
IAutomationRegistryMaster2_3 as IAutomationRegistry2_3 } from '../../../typechain/IAutomationRegistryMaster2_3' import { IAutomationRegistryMaster2_3__factory as IAutomationRegistryMaster2_3Factory } from '../../../typechain/factories/IAutomationRegistryMaster2_3__factory' -export const deployRegistry21 = async ( - from: Signer, - mode: Parameters[0], - link: Parameters[1], - linkNative: Parameters[2], - fastgas: Parameters[3], -): Promise => { - const logicBFactory = await ethers.getContractFactory( - 'KeeperRegistryLogicB2_1', - ) - const logicAFactory = await ethers.getContractFactory( - 'KeeperRegistryLogicA2_1', - ) - const registryFactory = await ethers.getContractFactory('KeeperRegistry2_1') - const forwarderLogicFactory = await ethers.getContractFactory( - 'AutomationForwarderLogic', - ) - const forwarderLogic = await forwarderLogicFactory.connect(from).deploy() - const logicB = await logicBFactory - .connect(from) - .deploy(mode, link, linkNative, fastgas, forwarderLogic.address) - const logicA = await logicAFactory.connect(from).deploy(logicB.address) - const master = await registryFactory.connect(from).deploy(logicA.address) - return IKeeperRegistryMasterFactory.connect(master.address, from) -} - type InterfaceABI = ConstructorParameters[0] type Entry = { inputs?: any[] @@ -130,42 +98,6 @@ export const assertSatisfiesInterface = ( } } -export const deployRegistry22 = async ( - from: Signer, - link: Parameters[0], - linkNative: Parameters[1], - fastgas: Parameters[2], - allowedReadOnlyAddress: Parameters< - AutomationRegistryLogicBFactory['deploy'] - >[3], -): Promise => { - const logicBFactory = await ethers.getContractFactory( - 'AutomationRegistryLogicB2_2', - ) - const logicAFactory = await ethers.getContractFactory( - 'AutomationRegistryLogicA2_2', - ) - const registryFactory = await ethers.getContractFactory( - 'AutomationRegistry2_2', - ) - const forwarderLogicFactory = await ethers.getContractFactory( - 'AutomationForwarderLogic', - ) - const forwarderLogic = await forwarderLogicFactory.connect(from).deploy() - const logicB = await logicBFactory - .connect(from) - .deploy( - link, - linkNative, - fastgas, - forwarderLogic.address, - allowedReadOnlyAddress, - ) - const logicA = await logicAFactory.connect(from).deploy(logicB.address) - const master = await registryFactory.connect(from).deploy(logicA.address) - return IAutomationRegistryMasterFactory.connect(master.address, from) -} - export const deployRegistry23 = async ( from: Signer, link: Parameters[0], From da03b850e76296ef652dfe3532c7aebefd58bea2 Mon Sep 17 00:00:00 2001 From: Mateusz Sekara Date: Wed, 8 Jan 2025 17:23:37 +0100 Subject: [PATCH 7/8] CCIP Config backported from CCIP repo (#15856) * Moving configs directly from CCIP repo * Moving configs directly from CCIP repo --- ccip/config/evm/Astar_Mainnet.toml | 8 +- ccip/config/evm/Astar_Shibuya.toml | 7 +- ccip/config/evm/Avalanche_ANZ_testnet.toml | 4 +- ccip/config/evm/Avalanche_Fuji.toml | 3 +- ccip/config/evm/Avalanche_Mainnet.toml | 4 +- ccip/config/evm/BOB_Mainnet.toml | 28 +++++++ ccip/config/evm/BOB_Testnet.toml | 28 +++++++ ccip/config/evm/BSC_Mainnet.toml | 7 ++ ccip/config/evm/BSC_Testnet.toml | 3 +- ccip/config/evm/Base_Mainnet.toml | 3 + ccip/config/evm/Base_Sepolia.toml | 3 + ccip/config/evm/Berachain_Testnet.toml | 24 ++++++ ccip/config/evm/Bitlayer_Mainnet.toml | 16 ++++ ccip/config/evm/Bitlayer_Testnet.toml | 16 ++++ ccip/config/evm/Blast_Mainnet.toml | 5 +- ccip/config/evm/Blast_Sepolia.toml | 5 +- ccip/config/evm/Bsquared_Mainnet.toml | 23 
++++++ ccip/config/evm/Bsquared_Testnet.toml | 23 ++++++ ccip/config/evm/Celo_Mainnet.toml | 8 +- ccip/config/evm/Celo_Testnet.toml | 3 + ccip/config/evm/Ethereum_Mainnet.toml | 5 ++ ccip/config/evm/Ethereum_Sepolia.toml | 2 + ccip/config/evm/Fantom_Mainnet.toml | 7 +- ccip/config/evm/Fantom_Testnet.toml | 7 +- ccip/config/evm/Gnosis_Chiado.toml | 5 ++ ccip/config/evm/Gnosis_Mainnet.toml | 5 ++ ccip/config/evm/Harmony_Mainnet.toml | 13 +++ ccip/config/evm/Harmony_Testnet.toml | 13 +++ ccip/config/evm/Hashkey_Mainnet.toml | 16 ++++ ccip/config/evm/Hashkey_Testnet.toml | 16 ++++ ccip/config/evm/Heco_Mainnet.toml | 26 ++++++ ccip/config/evm/Hedera_Mainnet.toml | 35 ++++++++ ccip/config/evm/Hedera_Testnet.toml | 35 ++++++++ ccip/config/evm/Klaytn_Mainnet.toml | 15 ++++ ccip/config/evm/Klaytn_Testnet.toml | 15 ++++ ccip/config/evm/Kroma_Mainnet.toml | 8 +- ccip/config/evm/Kroma_Sepolia.toml | 8 +- ccip/config/evm/L3X_Mainnet.toml | 6 +- ccip/config/evm/L3X_Sepolia.toml | 6 +- ccip/config/evm/Linea_Goerli.toml | 17 ++++ ccip/config/evm/Linea_Mainnet.toml | 7 +- ccip/config/evm/Linea_Sepolia.toml | 5 +- ccip/config/evm/Mantle_Mainnet.toml | 33 ++++++++ ccip/config/evm/Mantle_Sepolia.toml | 31 +++++-- ccip/config/evm/Metis_Mainnet.toml | 15 +++- ccip/config/evm/Metis_Sepolia.toml | 5 +- ccip/config/evm/Mode_Mainnet.toml | 3 + ccip/config/evm/Mode_Sepolia.toml | 3 + ccip/config/evm/Optimism_Mainnet.toml | 3 + ccip/config/evm/Optimism_Sepolia.toml | 3 + ccip/config/evm/Polygon_Amoy.toml | 7 +- ccip/config/evm/Polygon_Mainnet.toml | 3 + ccip/config/evm/Polygon_Mumbai.toml | 31 +++++++ ccip/config/evm/Polygon_Zkevm_Cardona.toml | 13 ++- ccip/config/evm/Polygon_Zkevm_Mainnet.toml | 12 +-- ccip/config/evm/RSK_Mainnet.toml | 13 +++ ccip/config/evm/RSK_Testnet.toml | 10 +++ ccip/config/evm/Ronin_Mainnet.toml | 16 ++++ ccip/config/evm/Ronin_Saigon.toml | 16 ++++ ccip/config/evm/Scroll_Mainnet.toml | 3 + ccip/config/evm/Scroll_Sepolia.toml | 3 + ccip/config/evm/Simulated.toml | 6 +- ccip/config/evm/Soneium_Sepolia.toml | 35 ++++++++ ccip/config/evm/Sonic_Mainnet.toml | 28 +++++++ ccip/config/evm/Sonic_Testnet.toml | 28 +++++++ ccip/config/evm/Unichain_Testnet.toml | 26 ++++++ ccip/config/evm/WeMix_Mainnet.toml | 4 +- ccip/config/evm/WeMix_Testnet.toml | 3 +- ccip/config/evm/Worldchain_Mainnet.toml | 23 ++++++ ccip/config/evm/Worldchain_Testnet.toml | 23 ++++++ ccip/config/evm/XLayer_Mainnet.toml | 2 +- ccip/config/evm/XLayer_Sepolia.toml | 3 + ccip/config/evm/fallback.toml | 95 ++++++++++++++++++++++ ccip/config/evm/zkSync_Mainnet.toml | 2 +- ccip/config/evm/zkSync_Sepolia.toml | 15 ++-- 75 files changed, 955 insertions(+), 54 deletions(-) create mode 100644 ccip/config/evm/BOB_Mainnet.toml create mode 100644 ccip/config/evm/BOB_Testnet.toml create mode 100644 ccip/config/evm/Berachain_Testnet.toml create mode 100644 ccip/config/evm/Bitlayer_Mainnet.toml create mode 100644 ccip/config/evm/Bitlayer_Testnet.toml create mode 100644 ccip/config/evm/Bsquared_Mainnet.toml create mode 100644 ccip/config/evm/Bsquared_Testnet.toml create mode 100644 ccip/config/evm/Harmony_Mainnet.toml create mode 100644 ccip/config/evm/Harmony_Testnet.toml create mode 100644 ccip/config/evm/Hashkey_Mainnet.toml create mode 100644 ccip/config/evm/Hashkey_Testnet.toml create mode 100644 ccip/config/evm/Heco_Mainnet.toml create mode 100644 ccip/config/evm/Hedera_Mainnet.toml create mode 100644 ccip/config/evm/Hedera_Testnet.toml create mode 100644 ccip/config/evm/Klaytn_Mainnet.toml create mode 100644 
ccip/config/evm/Klaytn_Testnet.toml create mode 100644 ccip/config/evm/Linea_Goerli.toml create mode 100644 ccip/config/evm/Mantle_Mainnet.toml create mode 100644 ccip/config/evm/Polygon_Mumbai.toml create mode 100644 ccip/config/evm/RSK_Mainnet.toml create mode 100644 ccip/config/evm/RSK_Testnet.toml create mode 100644 ccip/config/evm/Ronin_Mainnet.toml create mode 100644 ccip/config/evm/Ronin_Saigon.toml create mode 100755 ccip/config/evm/Soneium_Sepolia.toml create mode 100644 ccip/config/evm/Sonic_Mainnet.toml create mode 100644 ccip/config/evm/Sonic_Testnet.toml create mode 100644 ccip/config/evm/Unichain_Testnet.toml create mode 100644 ccip/config/evm/Worldchain_Mainnet.toml create mode 100644 ccip/config/evm/Worldchain_Testnet.toml create mode 100644 ccip/config/evm/fallback.toml diff --git a/ccip/config/evm/Astar_Mainnet.toml b/ccip/config/evm/Astar_Mainnet.toml index 87808001eb7..5405a67d563 100644 --- a/ccip/config/evm/Astar_Mainnet.toml +++ b/ccip/config/evm/Astar_Mainnet.toml @@ -1,4 +1,5 @@ ChainID = '592' +ChainType = 'astar' FinalityTagEnabled = true FinalityDepth = 100 LogPollInterval = '6s' @@ -6,4 +7,9 @@ LogPollInterval = '6s' [GasEstimator] EIP1559DynamicFees = false PriceMax = '100000 gwei' -LimitDefault = 8000000 \ No newline at end of file +LimitDefault = 8000000 + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Astar_Shibuya.toml b/ccip/config/evm/Astar_Shibuya.toml index 5a5df06f6f0..cfcd7c31c75 100644 --- a/ccip/config/evm/Astar_Shibuya.toml +++ b/ccip/config/evm/Astar_Shibuya.toml @@ -6,4 +6,9 @@ LogPollInterval = '6s' [GasEstimator] EIP1559DynamicFees = false PriceMax = '100000 gwei' -LimitDefault = 8000000 \ No newline at end of file +LimitDefault = 8000000 + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false \ No newline at end of file diff --git a/ccip/config/evm/Avalanche_ANZ_testnet.toml b/ccip/config/evm/Avalanche_ANZ_testnet.toml index 1242e1ec06e..936a82d5092 100644 --- a/ccip/config/evm/Avalanche_ANZ_testnet.toml +++ b/ccip/config/evm/Avalanche_ANZ_testnet.toml @@ -19,4 +19,6 @@ PriceMin = '25 gwei' BlockHistorySize = 24 [HeadTracker] -PersistenceEnabled = false +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Avalanche_Fuji.toml b/ccip/config/evm/Avalanche_Fuji.toml index 7df1d26a336..4340b6b861d 100644 --- a/ccip/config/evm/Avalanche_Fuji.toml +++ b/ccip/config/evm/Avalanche_Fuji.toml @@ -17,5 +17,6 @@ PriceDefault = '1 gwei' BlockHistorySize = 24 [HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 FinalityTagBypass = false -PersistenceEnabled = false diff --git a/ccip/config/evm/Avalanche_Mainnet.toml b/ccip/config/evm/Avalanche_Mainnet.toml index 341ae5478b3..ac73a7b98fa 100644 --- a/ccip/config/evm/Avalanche_Mainnet.toml +++ b/ccip/config/evm/Avalanche_Mainnet.toml @@ -18,4 +18,6 @@ PriceDefault = '1 gwei' BlockHistorySize = 24 [HeadTracker] -PersistenceEnabled = false +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/BOB_Mainnet.toml b/ccip/config/evm/BOB_Mainnet.toml new file mode 100644 index 00000000000..70cc2fb8ba4 
--- /dev/null +++ b/ccip/config/evm/BOB_Mainnet.toml @@ -0,0 +1,28 @@ +ChainID = '60808' +# OP stack https://docs.gobob.xyz/learn/introduction/stack-overview#rollup-layer +ChainType = 'optimismBedrock' +# FinalityDepth in mainnet showed more than 3k +FinalityDepth = 3150 +# block_time was: 2s, adding 1 second buffer +LogPollInterval = '3s' + +# finality_depth * block_time / 60 secs = ~105 min (finality time) +NoNewFinalizedHeadsThreshold = '110m' + +FinalityTagEnabled = true + +[GasEstimator] +EIP1559DynamicFees = true +Mode = 'FeeHistory' + +[GasEstimator.FeeHistory] +# block_time was: 2s, per recommendation skip 1-2 blocks +CacheTimeout = '4s' + +[GasEstimator.BlockHistory] +BlockHistorySize = 100 + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/BOB_Testnet.toml b/ccip/config/evm/BOB_Testnet.toml new file mode 100644 index 00000000000..bd8505c4e44 --- /dev/null +++ b/ccip/config/evm/BOB_Testnet.toml @@ -0,0 +1,28 @@ +ChainID = '808813' +# OP stack https://docs.gobob.xyz/learn/introduction/stack-overview#rollup-layer +ChainType = 'optimismBedrock' +# FinalityDepth in mainnet showed more than 3k +FinalityDepth = 3150 +# block_time was: 2s, adding 1 second buffer +LogPollInterval = '3s' + +# finality_depth * block_time / 60 secs = ~105 min (finality time) +NoNewFinalizedHeadsThreshold = '110m' + +FinalityTagEnabled = true + +[GasEstimator] +EIP1559DynamicFees = true +Mode = 'FeeHistory' + +[GasEstimator.FeeHistory] +# block_time was: 2s, per recommendation skip 1-2 blocks +CacheTimeout = '4s' + +[GasEstimator.BlockHistory] +BlockHistorySize = 100 + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/BSC_Mainnet.toml b/ccip/config/evm/BSC_Mainnet.toml index 10f4c570bef..df140e63973 100644 --- a/ccip/config/evm/BSC_Mainnet.toml +++ b/ccip/config/evm/BSC_Mainnet.toml @@ -13,6 +13,8 @@ NoNewFinalizedHeadsThreshold = '45s' [GasEstimator] PriceDefault = '5 gwei' +# Set to the BSC node's default Eth.Miner.GasPrice config +PriceMin = '3 gwei' # 15s delay since feeds update every minute in volatile situations BumpThreshold = 5 @@ -26,3 +28,8 @@ ObservationGracePeriod = '500ms' [NodePool] SyncThreshold = 10 + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/BSC_Testnet.toml b/ccip/config/evm/BSC_Testnet.toml index bb13501f1a2..9c528f816ea 100644 --- a/ccip/config/evm/BSC_Testnet.toml +++ b/ccip/config/evm/BSC_Testnet.toml @@ -22,8 +22,9 @@ BlockHistorySize = 24 [HeadTracker] HistoryDepth = 100 SamplingInterval = '1s' +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 FinalityTagBypass = false -PersistenceEnabled = false [OCR] DatabaseTimeout = '2s' diff --git a/ccip/config/evm/Base_Mainnet.toml b/ccip/config/evm/Base_Mainnet.toml index da38182b194..0f895e1bc6b 100644 --- a/ccip/config/evm/Base_Mainnet.toml +++ b/ccip/config/evm/Base_Mainnet.toml @@ -20,6 +20,9 @@ ResendAfterThreshold = '30s' [HeadTracker] HistoryDepth = 300 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [NodePool] SyncThreshold = 10 diff --git a/ccip/config/evm/Base_Sepolia.toml 
b/ccip/config/evm/Base_Sepolia.toml index 92f7717b27d..202c544fb4b 100644 --- a/ccip/config/evm/Base_Sepolia.toml +++ b/ccip/config/evm/Base_Sepolia.toml @@ -21,6 +21,9 @@ ResendAfterThreshold = '30s' [HeadTracker] HistoryDepth = 300 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [NodePool] SyncThreshold = 10 diff --git a/ccip/config/evm/Berachain_Testnet.toml b/ccip/config/evm/Berachain_Testnet.toml new file mode 100644 index 00000000000..9fc810e8908 --- /dev/null +++ b/ccip/config/evm/Berachain_Testnet.toml @@ -0,0 +1,24 @@ +ChainID = '80084' +# finality_depth: instant +FinalityDepth = 10 +# block_time: 5s, adding 1 second buffer +LogPollInterval = '6s' + +# finality_depth * block_time / 60 secs = ~0.8 min (finality time) +NoNewFinalizedHeadsThreshold = '5m' + +[GasEstimator] +EIP1559DynamicFees = true +Mode = 'FeeHistory' + +[GasEstimator.FeeHistory] +# block_time was: 5s, per recommendation skip 1-2 blocks +CacheTimeout = '10s' + +[GasEstimator.BlockHistory] +BlockHistorySize = 100 + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Bitlayer_Mainnet.toml b/ccip/config/evm/Bitlayer_Mainnet.toml new file mode 100644 index 00000000000..f6d669d4f78 --- /dev/null +++ b/ccip/config/evm/Bitlayer_Mainnet.toml @@ -0,0 +1,16 @@ +ChainID = '200901' +FinalityTagEnabled = false +FinalityDepth = 21 # confirmed with Bitlayer team and recommended by docs: https://docs.bitlayer.org/docs/Learn/BitlayerNetwork/AboutFinality/#about-finality-at-stage-bitlayer-pos-bitlayer-mainnet-v1 + +[GasEstimator] +Mode = 'FeeHistory' +EIP1559DynamicFees = false +PriceMax = '1 gwei' # DS&A recommended value +PriceMin = '40 mwei' # During testing, we saw minimum gas prices ~50 mwei +PriceDefault = '1 gwei' # As we set PriceMax to '1 gwei' and PriceDefault must be less than or equal to PriceMax +FeeCapDefault = '1 gwei' # As we set PriceMax to '1 gwei' and FeeCapDefault must be less than or equal to PriceMax + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Bitlayer_Testnet.toml b/ccip/config/evm/Bitlayer_Testnet.toml new file mode 100644 index 00000000000..7107527ce2f --- /dev/null +++ b/ccip/config/evm/Bitlayer_Testnet.toml @@ -0,0 +1,16 @@ +ChainID = '200810' +FinalityTagEnabled = false +FinalityDepth = 21 # confirmed with Bitlayer team and recommended by docs: https://docs.bitlayer.org/docs/Learn/BitlayerNetwork/AboutFinality/#about-finality-at-stage-bitlayer-pos-bitlayer-mainnet-v1 + +[GasEstimator] +Mode='FeeHistory' +EIP1559DynamicFees = false +PriceMax = '1 gwei' # DS&A recommended value +PriceMin = '40 mwei' # During testing, we saw minimum gas prices ~50 mwei +PriceDefault = '1 gwei' # As we set PriceMax to '1 gwei' and PriceDefault must be less than or equal to PriceMax +FeeCapDefault = '1 gwei' # As we set PriceMax to '1 gwei' and FeeCapDefault must be less than or equal to PriceMax + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Blast_Mainnet.toml b/ccip/config/evm/Blast_Mainnet.toml index f8b501723ff..26ecddeec54 100644 --- a/ccip/config/evm/Blast_Mainnet.toml +++ b/ccip/config/evm/Blast_Mainnet.toml @@ -26,9 +26,12 @@ 
EIP1559FeeCapBufferBlocks = 0 [HeadTracker] HistoryDepth = 300 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [NodePool] # 4 block sync time between nodes to ensure they aren't labelled unreachable too soon PollFailureThreshold = 4 # polls every 4sec to check if there is a block produced, since blockRate is ~3sec -PollInterval = '4s' \ No newline at end of file +PollInterval = '4s' diff --git a/ccip/config/evm/Blast_Sepolia.toml b/ccip/config/evm/Blast_Sepolia.toml index 96dc5c67871..55f2356ad3a 100644 --- a/ccip/config/evm/Blast_Sepolia.toml +++ b/ccip/config/evm/Blast_Sepolia.toml @@ -26,9 +26,12 @@ EIP1559FeeCapBufferBlocks = 0 [HeadTracker] HistoryDepth = 300 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [NodePool] # 4 block sync time between nodes to ensure they aren't labelled unreachable too soon PollFailureThreshold = 4 # polls every 4sec to check if there is a block produced, since blockRate is ~3sec -PollInterval = '4s' \ No newline at end of file +PollInterval = '4s' diff --git a/ccip/config/evm/Bsquared_Mainnet.toml b/ccip/config/evm/Bsquared_Mainnet.toml new file mode 100644 index 00000000000..61b0e5337c7 --- /dev/null +++ b/ccip/config/evm/Bsquared_Mainnet.toml @@ -0,0 +1,23 @@ +ChainID = '223' +# OP stack from questionnaire https://docs.google.com/spreadsheets/d/1l8dx1GzxEnjgwH5x3vB60FUr5iFALzPcs6W_wOAiuDs/edit?gid=625078687#gid=625078687 +ChainType = 'optimismBedrock' +# finality_depth was: ~1900 +FinalityDepth = 2000 +# block_time: ~2s, adding 1 second buffer +LogPollInterval = '3s' + +# finality_depth * block_time / 60 secs = ~66 min (finality time) +NoNewFinalizedHeadsThreshold = '70m' + +FinalityTagEnabled = true + +[GasEstimator] +EIP1559DynamicFees = true +Mode = 'FeeHistory' + +[GasEstimator.FeeHistory] +# block_time was: 2s, per recommendation skip 1-2 blocks +CacheTimeout = '4s' + +[GasEstimator.BlockHistory] +BlockHistorySize = 100 diff --git a/ccip/config/evm/Bsquared_Testnet.toml b/ccip/config/evm/Bsquared_Testnet.toml new file mode 100644 index 00000000000..b7cfd35fc41 --- /dev/null +++ b/ccip/config/evm/Bsquared_Testnet.toml @@ -0,0 +1,23 @@ +ChainID = '1123' +# OP stack from questionnaire https://docs.google.com/spreadsheets/d/1l8dx1GzxEnjgwH5x3vB60FUr5iFALzPcs6W_wOAiuDs/edit?gid=625078687#gid=625078687 +ChainType = 'optimismBedrock' +# finality_depth was: ~1900 +FinalityDepth = 2000 +# block_time: ~2s, adding 1 second buffer +LogPollInterval = '3s' + +# finality_depth * block_time / 60 secs = ~66 min (finality time) +NoNewFinalizedHeadsThreshold = '70m' + +FinalityTagEnabled = true + +[GasEstimator] +EIP1559DynamicFees = true +Mode = 'FeeHistory' + +[GasEstimator.FeeHistory] +# block_time was: 2s, per recommendation skip 1-2 blocks +CacheTimeout = '4s' + +[GasEstimator.BlockHistory] +BlockHistorySize = 100 diff --git a/ccip/config/evm/Celo_Mainnet.toml b/ccip/config/evm/Celo_Mainnet.toml index 0ed08986d32..9da7d632d0d 100644 --- a/ccip/config/evm/Celo_Mainnet.toml +++ b/ccip/config/evm/Celo_Mainnet.toml @@ -1,6 +1,10 @@ ChainID = '42220' ChainType = 'celo' +# FT and FD are both present here because the dev effort rely only on FinalityTagEnabled are still in progress. +# We expect to be able to rely only on FinalityTagEnabled=true in the short future. 
+# https://chainlink-core.slack.com/archives/C05CS33N08N/p1715102940763339?thread_ts=1715102478.537529&cid=C05CS33N08N FinalityDepth = 10 +FinalityTagEnabled = true LogPollInterval = '5s' MinIncomingConfirmations = 1 NoNewHeadsThreshold = '1m' @@ -18,4 +22,6 @@ BlockHistorySize = 12 [HeadTracker] HistoryDepth = 50 -PersistenceEnabled = false +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Celo_Testnet.toml b/ccip/config/evm/Celo_Testnet.toml index 0e4594150dd..c03d855acf6 100644 --- a/ccip/config/evm/Celo_Testnet.toml +++ b/ccip/config/evm/Celo_Testnet.toml @@ -1,5 +1,8 @@ ChainID = '44787' ChainType = 'celo' +# FT and FD are both present here because the dev effort rely only on FinalityTagEnabled are still in progress. +# We expect to be able to rely only on FinalityTagEnabled=true in the short future. +# https://chainlink-core.slack.com/archives/C05CS33N08N/p1715102940763339?thread_ts=1715102478.537529&cid=C05CS33N08N FinalityTagEnabled = true FinalityDepth = 2750 # mean finality time of ~37 minutes + 500 block buffer LogPollInterval = '1s' # 1 sec block rate diff --git a/ccip/config/evm/Ethereum_Mainnet.toml b/ccip/config/evm/Ethereum_Mainnet.toml index 0bcaf35c648..ec3a78156ed 100644 --- a/ccip/config/evm/Ethereum_Mainnet.toml +++ b/ccip/config/evm/Ethereum_Mainnet.toml @@ -15,3 +15,8 @@ TransactionPercentile = 50 [OCR2.Automation] GasLimit = 10500000 + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Ethereum_Sepolia.toml b/ccip/config/evm/Ethereum_Sepolia.toml index 24a0e68f77a..966f091f891 100644 --- a/ccip/config/evm/Ethereum_Sepolia.toml +++ b/ccip/config/evm/Ethereum_Sepolia.toml @@ -14,4 +14,6 @@ TransactionPercentile = 50 GasLimit = 10500000 [HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 FinalityTagBypass = false diff --git a/ccip/config/evm/Fantom_Mainnet.toml b/ccip/config/evm/Fantom_Mainnet.toml index 7e76d94278d..2af504796e0 100644 --- a/ccip/config/evm/Fantom_Mainnet.toml +++ b/ccip/config/evm/Fantom_Mainnet.toml @@ -9,4 +9,9 @@ RPCBlockQueryDelay = 2 Mode = 'SuggestedPrice' [OCR2.Automation] -GasLimit = 3800000 \ No newline at end of file +GasLimit = 3800000 + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Fantom_Testnet.toml b/ccip/config/evm/Fantom_Testnet.toml index 5f24a76c2e7..b361a8d14dd 100644 --- a/ccip/config/evm/Fantom_Testnet.toml +++ b/ccip/config/evm/Fantom_Testnet.toml @@ -9,4 +9,9 @@ RPCBlockQueryDelay = 2 Mode = 'SuggestedPrice' [OCR2.Automation] -GasLimit = 3800000 \ No newline at end of file +GasLimit = 3800000 + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Gnosis_Chiado.toml b/ccip/config/evm/Gnosis_Chiado.toml index 379377a2266..320aa087209 100644 --- a/ccip/config/evm/Gnosis_Chiado.toml +++ b/ccip/config/evm/Gnosis_Chiado.toml @@ -8,3 +8,8 @@ NoNewFinalizedHeadsThreshold = '2m' [GasEstimator] EIP1559DynamicFees = true PriceMax = '500 gwei' + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 
+FinalityTagBypass = false diff --git a/ccip/config/evm/Gnosis_Mainnet.toml b/ccip/config/evm/Gnosis_Mainnet.toml index 628646364f5..ec8ac227f78 100644 --- a/ccip/config/evm/Gnosis_Mainnet.toml +++ b/ccip/config/evm/Gnosis_Mainnet.toml @@ -16,3 +16,8 @@ PriceDefault = '1 gwei' PriceMax = '500 gwei' # 1 Gwei is the minimum accepted by the validators (unless whitelisted) PriceMin = '1 gwei' + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Harmony_Mainnet.toml b/ccip/config/evm/Harmony_Mainnet.toml new file mode 100644 index 00000000000..1cee98e77c7 --- /dev/null +++ b/ccip/config/evm/Harmony_Mainnet.toml @@ -0,0 +1,13 @@ +ChainID = '1666600000' +LinkContractAddress = '0x218532a12a389a4a92fC0C5Fb22901D1c19198aA' +LogPollInterval = '2s' +MinIncomingConfirmations = 1 +NoNewHeadsThreshold = '30s' + +[GasEstimator] +PriceDefault = '5 gwei' + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Harmony_Testnet.toml b/ccip/config/evm/Harmony_Testnet.toml new file mode 100644 index 00000000000..8b7c85b9c28 --- /dev/null +++ b/ccip/config/evm/Harmony_Testnet.toml @@ -0,0 +1,13 @@ +ChainID = '1666700000' +LinkContractAddress = '0x8b12Ac23BFe11cAb03a634C1F117D64a7f2cFD3e' +LogPollInterval = '2s' +MinIncomingConfirmations = 1 +NoNewHeadsThreshold = '30s' + +[GasEstimator] +PriceDefault = '5 gwei' + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Hashkey_Mainnet.toml b/ccip/config/evm/Hashkey_Mainnet.toml new file mode 100644 index 00000000000..69450c96f80 --- /dev/null +++ b/ccip/config/evm/Hashkey_Mainnet.toml @@ -0,0 +1,16 @@ +ChainID = '177' +ChainType = 'optimismBedrock' +FinalityTagEnabled = true + +[GasEstimator] +PriceMax = '1000 gwei' +LimitDefault = 8000000 +FeeCapDefault = '1000 gwei' + +[NodePool] +PollFailureThreshold = 2 +PollInterval = '8s' + +[GasEstimator.DAOracle] +OracleType = 'opstack' +OracleAddress = '0x420000000000000000000000000000000000000F' \ No newline at end of file diff --git a/ccip/config/evm/Hashkey_Testnet.toml b/ccip/config/evm/Hashkey_Testnet.toml new file mode 100644 index 00000000000..c342e503a33 --- /dev/null +++ b/ccip/config/evm/Hashkey_Testnet.toml @@ -0,0 +1,16 @@ +ChainID = '133' +ChainType = 'optimismBedrock' +FinalityTagEnabled = true + +[GasEstimator] +PriceMax = '1000 gwei' +LimitDefault = 8000000 +FeeCapDefault = '1000 gwei' + +[NodePool] +PollFailureThreshold = 2 +PollInterval = '8s' + +[GasEstimator.DAOracle] +OracleType = 'opstack' +OracleAddress = '0x420000000000000000000000000000000000000F' \ No newline at end of file diff --git a/ccip/config/evm/Heco_Mainnet.toml b/ccip/config/evm/Heco_Mainnet.toml new file mode 100644 index 00000000000..a39e405be31 --- /dev/null +++ b/ccip/config/evm/Heco_Mainnet.toml @@ -0,0 +1,26 @@ +# Heco uses BSC's settings. 
+ChainID = '128'
+LinkContractAddress = '0x404460C6A5EdE2D891e8297795264fDe62ADBB75'
+LogPollInterval = '3s'
+NoNewHeadsThreshold = '30s'
+RPCBlockQueryDelay = 2
+
+[GasEstimator]
+PriceDefault = '5 gwei'
+BumpThreshold = 5
+
+[GasEstimator.BlockHistory]
+BlockHistorySize = 24
+
+[OCR]
+DatabaseTimeout = '2s'
+ContractTransmitterTransmitTimeout = '2s'
+ObservationGracePeriod = '500ms'
+
+[NodePool]
+SyncThreshold = 10
+
+[HeadTracker]
+# FinalityDepth < 1k => FinalityTagBypass = false
+# https://smartcontract-it.atlassian.net/browse/SHIP-4078
+FinalityTagBypass = false
diff --git a/ccip/config/evm/Hedera_Mainnet.toml b/ccip/config/evm/Hedera_Mainnet.toml
new file mode 100644
index 00000000000..fdd6528e0a4
--- /dev/null
+++ b/ccip/config/evm/Hedera_Mainnet.toml
@@ -0,0 +1,35 @@
+ChainID = '295'
+ChainType = 'hedera'
+# Considering the 3-5 (6 including a buffer) seconds of finality and 2 seconds block production
+# We set the depth to 6/2 = 3 blocks, setting to 10 for safety
+FinalityDepth = 10
+# Hedera has high TPS, so polling less often
+LogPollInterval = '10s'
+MinIncomingConfirmations = 1
+
+[BalanceMonitor]
+Enabled = true
+
+[GasEstimator]
+Mode = 'SuggestedPrice'
+# Since Hedera doesn't have a mempool and there's no way for a node to front run or a user to bribe a node to submit the transaction earlier than its consensus timestamp,
+# But they have automated congestion pricing throttling which would mean at high sustained levels the gasPrice itself could be increased to prevent malicious behaviour.
+# Disabling the BumpThreshold as TXM now implicitly handles the bumping after checking the on-chain nonce & re-broadcast for the Hedera chain type
+BumpThreshold = 0
+BumpMin = '10 gwei'
+BumpPercent = 20
+
+[Transactions]
+# To hit throttling you'd need to maintain 15M gas/sec over a prolonged period of time.
+# Because Hedera's block times are every 2 secs it's less likely to happen as compared to other chains
+# Setting this a little higher even though Hedera has high TPS; we have seen 10-12s to get the transaction mined & 20-25s in case of failures
+# Accounting for node syncs & to avoid re-sending txns before fetching the receipt, setting to 2m
+ResendAfterThreshold = '2m'
+
+[NodePool]
+SyncThreshold = 10
+
+[HeadTracker]
+# FinalityDepth < 1k => FinalityTagBypass = false
+# https://smartcontract-it.atlassian.net/browse/SHIP-4078
+FinalityTagBypass = false
diff --git a/ccip/config/evm/Hedera_Testnet.toml b/ccip/config/evm/Hedera_Testnet.toml
new file mode 100644
index 00000000000..7e9ec3fe2c6
--- /dev/null
+++ b/ccip/config/evm/Hedera_Testnet.toml
@@ -0,0 +1,35 @@
+ChainID = '296'
+ChainType = 'hedera'
+# Considering the 3-5 (6 including a buffer) seconds of finality and 2 seconds block production
+# We set the depth to 6/2 = 3 blocks, setting to 10 for safety
+FinalityDepth = 10
+# Hedera has high TPS, so polling less often
+LogPollInterval = '10s'
+MinIncomingConfirmations = 1
+
+[BalanceMonitor]
+Enabled = true
+
+[GasEstimator]
+Mode = 'SuggestedPrice'
+# Since Hedera doesn't have a mempool and there's no way for a node to front run or a user to bribe a node to submit the transaction earlier than its consensus timestamp,
+# But they have automated congestion pricing throttling which would mean at high sustained levels the gasPrice itself could be increased to prevent malicious behaviour.
+# Disabling the BumpThreshold as TXM now implicitly handles the bumping after checking the on-chain nonce & re-broadcast for the Hedera chain type
+BumpThreshold = 0
+BumpMin = '10 gwei'
+BumpPercent = 20
+
+[Transactions]
+# To hit throttling you'd need to maintain 15M gas/sec over a prolonged period of time.
+# Because Hedera's block times are every 2 secs it's less likely to happen as compared to other chains
+# Setting this a little higher even though Hedera has high TPS; we have seen 10-12s to get the transaction mined & 20-25s in case of failures
+# Accounting for node syncs & to avoid re-sending txns before fetching the receipt, setting to 2m
+ResendAfterThreshold = '2m'
+
+[NodePool]
+SyncThreshold = 10
+
+[HeadTracker]
+# FinalityDepth < 1k => FinalityTagBypass = false
+# https://smartcontract-it.atlassian.net/browse/SHIP-4078
+FinalityTagBypass = false
diff --git a/ccip/config/evm/Klaytn_Mainnet.toml b/ccip/config/evm/Klaytn_Mainnet.toml
new file mode 100644
index 00000000000..ff8b97de970
--- /dev/null
+++ b/ccip/config/evm/Klaytn_Mainnet.toml
@@ -0,0 +1,15 @@
+ChainID = '8217'
+FinalityDepth = 10
+MinIncomingConfirmations = 1
+NoNewHeadsThreshold = '30s'
+OCR.ContractConfirmations = 1
+
+[GasEstimator]
+Mode = 'SuggestedPrice'
+PriceDefault = '750 gwei' # gwei = ston
+BumpThreshold = 5
+
+[HeadTracker]
+# FinalityDepth < 1k => FinalityTagBypass = false
+# https://smartcontract-it.atlassian.net/browse/SHIP-4078
+FinalityTagBypass = false
diff --git a/ccip/config/evm/Klaytn_Testnet.toml b/ccip/config/evm/Klaytn_Testnet.toml
new file mode 100644
index 00000000000..599b604f086
--- /dev/null
+++ b/ccip/config/evm/Klaytn_Testnet.toml
@@ -0,0 +1,15 @@
+ChainID = '1001'
+FinalityDepth = 10
+MinIncomingConfirmations = 1
+NoNewHeadsThreshold = '30s'
+OCR.ContractConfirmations = 1
+
+[GasEstimator]
+Mode = 'SuggestedPrice'
+PriceDefault = '750 gwei' # gwei = ston
+BumpThreshold = 5
+
+[HeadTracker]
+# FinalityDepth < 1k => FinalityTagBypass = false
+# https://smartcontract-it.atlassian.net/browse/SHIP-4078
+FinalityTagBypass = false
diff --git a/ccip/config/evm/Kroma_Mainnet.toml b/ccip/config/evm/Kroma_Mainnet.toml
index 3a48aa8ae1b..21bbe7c357c 100644
--- a/ccip/config/evm/Kroma_Mainnet.toml
+++ b/ccip/config/evm/Kroma_Mainnet.toml
@@ -1,6 +1,9 @@
 ChainID = '255'
 ChainType = 'kroma' # Kroma is based on the Optimism Bedrock architechture
-FinalityDepth = 400
+# FT and FD are both present here because the dev effort to rely only on FinalityTagEnabled is still in progress.
+# We expect to be able to rely only on FinalityTagEnabled=true in the short future. +# https://chainlink-core.slack.com/archives/C05CS33N08N/p1715102940763339?thread_ts=1715102478.537529&cid=C05CS33N08N +FinalityDepth = 700 FinalityTagEnabled = true LogPollInterval = '2s' NoNewHeadsThreshold = '40s' @@ -19,6 +22,9 @@ ResendAfterThreshold = '30s' [HeadTracker] HistoryDepth = 400 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [NodePool] SyncThreshold = 10 diff --git a/ccip/config/evm/L3X_Mainnet.toml b/ccip/config/evm/L3X_Mainnet.toml index 9dd33c9e15d..5f14e5e8e8c 100644 --- a/ccip/config/evm/L3X_Mainnet.toml +++ b/ccip/config/evm/L3X_Mainnet.toml @@ -17,5 +17,7 @@ PriceDefault = '0.1 gwei' FeeCapDefault = '1000 gwei' BumpThreshold = 5 -[GasEstimator.DAOracle] -OracleType = 'arbitrum' +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/L3X_Sepolia.toml b/ccip/config/evm/L3X_Sepolia.toml index c0f6a60e943..ca21bc13d6e 100644 --- a/ccip/config/evm/L3X_Sepolia.toml +++ b/ccip/config/evm/L3X_Sepolia.toml @@ -17,5 +17,7 @@ PriceDefault = '0.1 gwei' FeeCapDefault = '1000 gwei' BumpThreshold = 5 -[GasEstimator.DAOracle] -OracleType = 'arbitrum' +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Linea_Goerli.toml b/ccip/config/evm/Linea_Goerli.toml new file mode 100644 index 00000000000..2c85f9cbc02 --- /dev/null +++ b/ccip/config/evm/Linea_Goerli.toml @@ -0,0 +1,17 @@ +ChainID = '59140' +# Block time 12s, finality < 3m +FinalityDepth = 15 +# Blocks are only emitted when a transaction happens / no empty blocks +NoNewHeadsThreshold = '0' + +[GasEstimator] +BumpPercent = 40 + +[Transactions] +# increase resend time to align with finality +ResendAfterThreshold = '3m' + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Linea_Mainnet.toml b/ccip/config/evm/Linea_Mainnet.toml index 5a89873acae..6614fef9d4e 100644 --- a/ccip/config/evm/Linea_Mainnet.toml +++ b/ccip/config/evm/Linea_Mainnet.toml @@ -1,6 +1,6 @@ ChainID = '59144' -# Block time 12s, finality < 60m -FinalityDepth = 300 +#3s block time ~ 20m finality based on committee decision +FinalityDepth = 600 # Blocks are only emitted when a transaction happens / no empty blocks NoNewHeadsThreshold = '0' @@ -15,6 +15,9 @@ ResendAfterThreshold = '3m' # set greater than finality depth [HeadTracker] HistoryDepth = 350 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [Transactions.AutoPurge] Enabled = true diff --git a/ccip/config/evm/Linea_Sepolia.toml b/ccip/config/evm/Linea_Sepolia.toml index 8f168ee93a6..2837c7ca601 100644 --- a/ccip/config/evm/Linea_Sepolia.toml +++ b/ccip/config/evm/Linea_Sepolia.toml @@ -1,5 +1,5 @@ ChainID = '59141' -FinalityDepth = 900 +FinalityDepth = 200 NoNewHeadsThreshold = '0' [GasEstimator] @@ -11,6 +11,9 @@ ResendAfterThreshold = '3m' [HeadTracker] HistoryDepth = 1000 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [Transactions.AutoPurge] Enabled = true diff --git 
a/ccip/config/evm/Mantle_Mainnet.toml b/ccip/config/evm/Mantle_Mainnet.toml new file mode 100644 index 00000000000..23d5168a7e9 --- /dev/null +++ b/ccip/config/evm/Mantle_Mainnet.toml @@ -0,0 +1,33 @@ +ChainID = '5000' +FinalityTagEnabled = true +FinalityDepth = 1200 +ChainType = 'optimismBedrock' +LogPollInterval = '2s' +MinIncomingConfirmations = 1 +NoNewFinalizedHeadsThreshold = '40m0s' + +[HeadTracker] +HistoryDepth = 1250 + +[GasEstimator] +PriceMax = '120 gwei' +# Limit values are high as Mantle's GasPrice is in native token (MNT) instead of ETH. Their proprietary TokenRatio parameter is used to adjust fees +LimitDefault = 80_000_000_000 +LimitMax = 100_000_000_000 +BumpMin = '100 wei' +BumpThreshold = 60 +EIP1559DynamicFees = true +FeeCapDefault = '120 gwei' +# Mantle recommends setting Priority Fee to 0 in their docs linked here: https://docs-v2.mantle.xyz/devs/concepts/tx-fee/eip-1559#application-of-eip-1559-in-mantle-v2-tectonic +TipCapDefault = '0 wei' +TipCapMin = '0 wei' + +[GasEstimator.BlockHistory] +# Default is 24, which leads to bumpy gas prices. In CCIP +# we want to smooth out the gas prices, so we increase the sample size. +BlockHistorySize = 200 +# The formula for FeeCap is (current block base fee * (1.125 ^ EIP1559FeeCapBufferBlocks) + tipcap) +# where tipcap is managed by the block history estimators. In the context of CCIP, +# the gas price is relayed to other changes for quotes so we want accurate/avg not pessimistic values. +# So we set this to zero so FeeCap = baseFee + tipcap. +EIP1559FeeCapBufferBlocks = 0 \ No newline at end of file diff --git a/ccip/config/evm/Mantle_Sepolia.toml b/ccip/config/evm/Mantle_Sepolia.toml index ee994a71826..705f91142f2 100644 --- a/ccip/config/evm/Mantle_Sepolia.toml +++ b/ccip/config/evm/Mantle_Sepolia.toml @@ -1,19 +1,34 @@ ChainID = '5003' +FinalityTagEnabled = true +FinalityDepth = 1200 ChainType = 'optimismBedrock' -FinalityDepth = 500 LogPollInterval = '2s' -NoNewHeadsThreshold = '0' MinIncomingConfirmations = 1 +NoNewFinalizedHeadsThreshold = '60m0s' [HeadTracker] -HistoryDepth = 600 +HistoryDepth = 1250 -[GasEstimator] -Mode = 'L2Suggested' -PriceMax = '200 gwei' -LimitDefault = 100000000 -FeeCapDefault = '200 gwei' +[GasEstimator] +PriceMax = '120 gwei' +# Limit values are high as Mantle's GasPrice is in native token (MNT) instead of ETH. Their proprietary TokenRatio parameter is used to adjust fees +LimitDefault = 80000000000 +LimitMax = 100000000000 +BumpMin = '100 wei' +BumpPercent = 20 +BumpThreshold = 60 +EIP1559DynamicFees = true +FeeCapDefault = '120 gwei' +# Mantle reccomends setting Priority Fee to 0 in their docs linked here: https://docs-v2.mantle.xyz/devs/concepts/tx-fee/eip-1559#application-of-eip-1559-in-mantle-v2-tectonic +TipCapDefault = '0 wei' +TipCapMin = '0 wei' [GasEstimator.BlockHistory] +# Default is 24, which leads to bumpy gas prices. In CCIP +# we want to smooth out the gas prices, so we increase the sample size. BlockHistorySize = 200 +# The formula for FeeCap is (current block base fee * (1.125 ^ EIP1559FeeCapBufferBlocks) + tipcap) +# where tipcap is managed by the block history estimators. In the context of CCIP, +# the gas price is relayed to other changes for quotes so we want accurate/avg not pessimistic values. +# So we set this to zero so FeeCap = baseFee + tipcap. 
EIP1559FeeCapBufferBlocks = 0 \ No newline at end of file diff --git a/ccip/config/evm/Metis_Mainnet.toml b/ccip/config/evm/Metis_Mainnet.toml index f057400d014..a95945e9f1b 100644 --- a/ccip/config/evm/Metis_Mainnet.toml +++ b/ccip/config/evm/Metis_Mainnet.toml @@ -1,8 +1,14 @@ # Metis is an L2 chain based on Optimism. ChainID = '1088' -ChainType = 'metis' +ChainType = 'optimismBedrock' # Sequencer offers absolute finality -FinalityDepth = 10 +# High variation on finality depth triggered a commitee to investigate +# and set 500 as a secure finality depth. +# https://chainlink-core.slack.com/archives/C0725LNLJLA/p1717118469587219 +FinalityDepth = 500 +# FT and FD are both present here because the dev effort rely only on FinalityTagEnabled are still in progress. +# We expect to be able to rely only on FinalityTagEnabled=true in the short future. +# https://chainlink-core.slack.com/archives/C05CS33N08N/p1715102940763339?thread_ts=1715102478.537529&cid=C05CS33N08N FinalityTagEnabled = true MinIncomingConfirmations = 1 NoNewHeadsThreshold = '0' @@ -19,3 +25,8 @@ BlockHistorySize = 0 [NodePool] SyncThreshold = 10 + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Metis_Sepolia.toml b/ccip/config/evm/Metis_Sepolia.toml index 4ff4056c75d..65247991d31 100644 --- a/ccip/config/evm/Metis_Sepolia.toml +++ b/ccip/config/evm/Metis_Sepolia.toml @@ -1,6 +1,9 @@ ChainID = '59902' ChainType = 'optimismBedrock' -FinalityDepth = 10 +# FT and FD are both present here because the dev effort rely only on FinalityTagEnabled are still in progress. +# We expect to be able to rely only on FinalityTagEnabled=true in the short future. +# https://chainlink-core.slack.com/archives/C05CS33N08N/p1715102940763339?thread_ts=1715102478.537529&cid=C05CS33N08N +FinalityDepth = 3000 FinalityTagEnabled = true MinIncomingConfirmations = 1 NoNewHeadsThreshold = '0' diff --git a/ccip/config/evm/Mode_Mainnet.toml b/ccip/config/evm/Mode_Mainnet.toml index 69a8e93fecd..b586cdacc78 100644 --- a/ccip/config/evm/Mode_Mainnet.toml +++ b/ccip/config/evm/Mode_Mainnet.toml @@ -24,6 +24,9 @@ EIP1559FeeCapBufferBlocks = 0 [HeadTracker] HistoryDepth = 300 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [NodePool] PollFailureThreshold = 2 diff --git a/ccip/config/evm/Mode_Sepolia.toml b/ccip/config/evm/Mode_Sepolia.toml index f7398869beb..d621010b4ef 100644 --- a/ccip/config/evm/Mode_Sepolia.toml +++ b/ccip/config/evm/Mode_Sepolia.toml @@ -24,6 +24,9 @@ EIP1559FeeCapBufferBlocks = 0 [HeadTracker] HistoryDepth = 300 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [NodePool] PollFailureThreshold = 2 diff --git a/ccip/config/evm/Optimism_Mainnet.toml b/ccip/config/evm/Optimism_Mainnet.toml index b0f56a49d90..e1398775495 100644 --- a/ccip/config/evm/Optimism_Mainnet.toml +++ b/ccip/config/evm/Optimism_Mainnet.toml @@ -21,6 +21,9 @@ ResendAfterThreshold = '30s' [HeadTracker] HistoryDepth = 300 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [NodePool] SyncThreshold = 10 diff --git a/ccip/config/evm/Optimism_Sepolia.toml b/ccip/config/evm/Optimism_Sepolia.toml index 1c71aa5dd83..2590feec51a 100644 --- a/ccip/config/evm/Optimism_Sepolia.toml +++ 
b/ccip/config/evm/Optimism_Sepolia.toml @@ -20,6 +20,9 @@ ResendAfterThreshold = '30s' [HeadTracker] HistoryDepth = 300 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [NodePool] SyncThreshold = 10 diff --git a/ccip/config/evm/Polygon_Amoy.toml b/ccip/config/evm/Polygon_Amoy.toml index b05b3053a8e..eb75eab271b 100644 --- a/ccip/config/evm/Polygon_Amoy.toml +++ b/ccip/config/evm/Polygon_Amoy.toml @@ -11,10 +11,10 @@ NoNewFinalizedHeadsThreshold = '12m' MaxQueued = 5000 [GasEstimator] -EIP1559DynamicFees = true -PriceMax = '115792089237316195423570985008687907853269984665.640564039457584007913129639935 tether' PriceDefault = '25 gwei' +PriceMax = '115792089237316195423570985008687907853269984665.640564039457584007913129639935 tether' PriceMin = '25 gwei' +EIP1559DynamicFees = true BumpMin = '20 gwei' BumpThreshold = 5 @@ -23,6 +23,9 @@ BlockHistorySize = 24 [HeadTracker] HistoryDepth = 2000 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [NodePool] SyncThreshold = 10 diff --git a/ccip/config/evm/Polygon_Mainnet.toml b/ccip/config/evm/Polygon_Mainnet.toml index bf605cab3c6..555dbfff815 100644 --- a/ccip/config/evm/Polygon_Mainnet.toml +++ b/ccip/config/evm/Polygon_Mainnet.toml @@ -33,6 +33,9 @@ BlockHistorySize = 24 [HeadTracker] # Polygon suffers from a tremendous number of re-orgs, we need to set this to something very large to be conservative enough HistoryDepth = 2000 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [NodePool] SyncThreshold = 10 diff --git a/ccip/config/evm/Polygon_Mumbai.toml b/ccip/config/evm/Polygon_Mumbai.toml new file mode 100644 index 00000000000..83f275a0643 --- /dev/null +++ b/ccip/config/evm/Polygon_Mumbai.toml @@ -0,0 +1,31 @@ +ChainID = '80001' +FinalityDepth = 500 +FinalityTagEnabled = true +LinkContractAddress = '0x326C977E6efc84E512bB9C30f76E30c160eD06FB' +LogPollInterval = '1s' +MinIncomingConfirmations = 5 +NoNewHeadsThreshold = '30s' +RPCBlockQueryDelay = 10 +RPCDefaultBatchSize = 100 + +[Transactions] +MaxQueued = 5000 + +[GasEstimator] +PriceDefault = '25 gwei' +PriceMax = '115792089237316195423570985008687907853269984665.640564039457584007913129639935 tether' +PriceMin = '25 gwei' +BumpMin = '20 gwei' +BumpThreshold = 5 + +[GasEstimator.BlockHistory] +BlockHistorySize = 24 + +[HeadTracker] +HistoryDepth = 2000 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false + +[NodePool] +SyncThreshold = 10 diff --git a/ccip/config/evm/Polygon_Zkevm_Cardona.toml b/ccip/config/evm/Polygon_Zkevm_Cardona.toml index 5e4861f9d44..146c23a8024 100644 --- a/ccip/config/evm/Polygon_Zkevm_Cardona.toml +++ b/ccip/config/evm/Polygon_Zkevm_Cardona.toml @@ -13,15 +13,20 @@ ContractConfirmations = 1 ResendAfterThreshold = '3m' [GasEstimator] -PriceMin = '1 mwei' +Mode = 'FeeHistory' +# The FeeHistory estimator does not enforce PriceMin, setting it to 0 to not place any limits on the price +PriceMin = '0' BumpPercent = 40 -BumpMin = '20 mwei' -[GasEstimator.BlockHistory] -BlockHistorySize = 12 +[GasEstimator.FeeHistory] +# Refresh the suggested price every 4 seconds, to stay slightly below their polling rate of 5s +CacheTimeout = '4s' [HeadTracker] HistoryDepth = 2000 +# FinalityDepth < 1k => FinalityTagBypass = false +# 
https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [Transactions.AutoPurge] Enabled = true diff --git a/ccip/config/evm/Polygon_Zkevm_Mainnet.toml b/ccip/config/evm/Polygon_Zkevm_Mainnet.toml index b38a483ff35..d42ef9b057e 100644 --- a/ccip/config/evm/Polygon_Zkevm_Mainnet.toml +++ b/ccip/config/evm/Polygon_Zkevm_Mainnet.toml @@ -1,6 +1,6 @@ ChainID = '1101' ChainType = 'zkevm' -FinalityDepth = 500 +FinalityDepth = 1000 NoNewHeadsThreshold = '6m' MinIncomingConfirmations = 1 LogPollInterval = '30s' @@ -14,12 +14,14 @@ ContractConfirmations = 1 ResendAfterThreshold = '3m' [GasEstimator] -PriceMin = '100 mwei' +Mode = 'FeeHistory' +# The FeeHistory estimator does not enforce PriceMin, setting it to 0 to not place any limits on the price +PriceMin = '0' BumpPercent = 40 -BumpMin = '100 mwei' -[GasEstimator.BlockHistory] -BlockHistorySize = 12 +[GasEstimator.FeeHistory] +# Refresh the suggested price every 4 seconds, to stay slightly below their polling rate of 5s +CacheTimeout = '4s' [HeadTracker] # Polygon suffers from a tremendous number of re-orgs, we need to set this to something very large to be conservative enough diff --git a/ccip/config/evm/RSK_Mainnet.toml b/ccip/config/evm/RSK_Mainnet.toml new file mode 100644 index 00000000000..8290481a331 --- /dev/null +++ b/ccip/config/evm/RSK_Mainnet.toml @@ -0,0 +1,13 @@ +# RSK prices its txes in sats not wei +ChainID = '30' +LinkContractAddress = '0x14AdaE34beF7ca957Ce2dDe5ADD97ea050123827' +LogPollInterval = '30s' +MinContractPayment = '0.001 link' + +[GasEstimator] +# It's about 100 times more expensive than Wei, very roughly speaking +PriceDefault = '50 mwei' +PriceMax = '50 gwei' +PriceMin = '0' +# rsk does not yet support EIP-1559 but this allows validation to pass +FeeCapDefault = '100 mwei' diff --git a/ccip/config/evm/RSK_Testnet.toml b/ccip/config/evm/RSK_Testnet.toml new file mode 100644 index 00000000000..2fde16aa7cc --- /dev/null +++ b/ccip/config/evm/RSK_Testnet.toml @@ -0,0 +1,10 @@ +ChainID = '31' +LinkContractAddress = '0x8bBbd80981FE76d44854D8DF305e8985c19f0e78' +MinContractPayment = '0.001 link' +LogPollInterval = '30s' + +[GasEstimator] +PriceDefault = '50 mwei' +PriceMax = '50 gwei' +PriceMin = '0' +FeeCapDefault = '100 mwei' diff --git a/ccip/config/evm/Ronin_Mainnet.toml b/ccip/config/evm/Ronin_Mainnet.toml new file mode 100644 index 00000000000..14bb9d1e258 --- /dev/null +++ b/ccip/config/evm/Ronin_Mainnet.toml @@ -0,0 +1,16 @@ +ChainID = "2020" +FinalityTagEnabled = true +LinkContractAddress = "0x3902228D6A3d2Dc44731fD9d45FeE6a61c722D0b" +# Ronin produces blocks every 3 seconds +LogPollInterval = "3s" +NoNewHeadsThreshold = "3m" + +[GasEstimator] +# Ronin uses default gas price of 20 gwei https://docs.skymavis.com/mavis/mpc/guides/estimate-gas#overview +Mode = 'FeeHistory' +PriceMax = "1000 gwei" + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Ronin_Saigon.toml b/ccip/config/evm/Ronin_Saigon.toml new file mode 100644 index 00000000000..b775f8f0626 --- /dev/null +++ b/ccip/config/evm/Ronin_Saigon.toml @@ -0,0 +1,16 @@ +ChainID = "2021" +FinalityTagEnabled = true +LinkContractAddress = "0x5bB50A6888ee6a67E22afFDFD9513be7740F1c15" +# Ronin produces blocks every 3 seconds +LogPollInterval = "3s" +NoNewHeadsThreshold = "3m" + +[GasEstimator] +# Ronin uses default gas price of 20 gwei https://docs.skymavis.com/mavis/mpc/guides/estimate-gas#overview +Mode = 
'FeeHistory' +PriceMax = "1000 gwei" + +[HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/Scroll_Mainnet.toml b/ccip/config/evm/Scroll_Mainnet.toml index b8e7bd09e80..f0449ef12be 100644 --- a/ccip/config/evm/Scroll_Mainnet.toml +++ b/ccip/config/evm/Scroll_Mainnet.toml @@ -17,6 +17,9 @@ BlockHistorySize = 24 [HeadTracker] HistoryDepth = 50 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [OCR] ContractConfirmations = 1 diff --git a/ccip/config/evm/Scroll_Sepolia.toml b/ccip/config/evm/Scroll_Sepolia.toml index baee2080d96..aca06ae18d3 100644 --- a/ccip/config/evm/Scroll_Sepolia.toml +++ b/ccip/config/evm/Scroll_Sepolia.toml @@ -17,6 +17,9 @@ BlockHistorySize = 24 [HeadTracker] HistoryDepth = 50 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [OCR] ContractConfirmations = 1 diff --git a/ccip/config/evm/Simulated.toml b/ccip/config/evm/Simulated.toml index e21dc0990f0..4ec8d962b21 100644 --- a/ccip/config/evm/Simulated.toml +++ b/ccip/config/evm/Simulated.toml @@ -1,5 +1,5 @@ ChainID = '1337' -FinalityDepth = 1 +FinalityDepth = 10 MinIncomingConfirmations = 1 MinContractPayment = '100' NoNewHeadsThreshold = '0s' @@ -19,7 +19,9 @@ PriceMax = '100 micro' HistoryDepth = 10 MaxBufferSize = 100 SamplingInterval = '0s' -PersistenceEnabled = false +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [OCR] ContractConfirmations = 1 diff --git a/ccip/config/evm/Soneium_Sepolia.toml b/ccip/config/evm/Soneium_Sepolia.toml new file mode 100755 index 00000000000..e0ea59ca22f --- /dev/null +++ b/ccip/config/evm/Soneium_Sepolia.toml @@ -0,0 +1,35 @@ +ChainID = '1946' +ChainType = 'optimismBedrock' +LinkContractAddress = '0x7ea13478Ea3961A0e8b538cb05a9DF0477c79Cd2' +FinalityDepth = 200 +LogPollInterval = '2s' +NoNewHeadsThreshold = '40s' +MinIncomingConfirmations = 1 +NoNewFinalizedHeadsThreshold = '120m' # Soneium can take upto 2Hours to finalize +FinalityTagEnabled = true + +[GasEstimator] +EIP1559DynamicFees = true +PriceMin = '1 wei' +BumpMin = '1 mwei' + +[GasEstimator.BlockHistory] +BlockHistorySize = 60 + +[Transactions] +ResendAfterThreshold = '30s' + +[HeadTracker] +HistoryDepth = 300 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false + +[NodePool] +SyncThreshold = 10 + +[OCR] +ContractConfirmations = 1 + +[OCR2.Automation] +GasLimit = 6500000 diff --git a/ccip/config/evm/Sonic_Mainnet.toml b/ccip/config/evm/Sonic_Mainnet.toml new file mode 100644 index 00000000000..523a931c8d6 --- /dev/null +++ b/ccip/config/evm/Sonic_Mainnet.toml @@ -0,0 +1,28 @@ +ChainId = '146' +FinalityDepth = 10 +FinalityTagEnabled = false +LogPollInterval = "1s" #1s block rate +MinIncomingConfirmations = 5 +RPCBlockQueryDelay = 10 +RPCDefaultBatchSize = 100 + +[GasEstimator] +Mode = 'FeeHistory' +EIP1559DynamicFees = true +BumpPercent = 10 +LimitDefault = 8000000 # default ccip value + +[GasEstimator.FeeHistory] +CacheTimeout = '2s' + +[GasEstimator.BlockHistory] +BlockHistorySize = 100 + +[HeadTracker] +HistoryDepth = 50 + +[NodePool] +SyncThreshold = 10 + +[Transactions] +MaxQueued = 500 \ No newline at end of file diff --git 
a/ccip/config/evm/Sonic_Testnet.toml b/ccip/config/evm/Sonic_Testnet.toml new file mode 100644 index 00000000000..ca3ccf8f718 --- /dev/null +++ b/ccip/config/evm/Sonic_Testnet.toml @@ -0,0 +1,28 @@ +ChainId = '57054' +FinalityDepth = 10 +FinalityTagEnabled = false +LogPollInterval = "1s" #1s block rate +MinIncomingConfirmations = 5 +RPCBlockQueryDelay = 10 +RPCDefaultBatchSize = 100 + +[GasEstimator] +Mode = 'FeeHistory' +EIP1559DynamicFees = true +BumpPercent = 10 +LimitDefault = 8000000 # default ccip value + +[GasEstimator.FeeHistory] +CacheTimeout = '2s' + +[GasEstimator.BlockHistory] +BlockHistorySize = 100 + +[HeadTracker] +HistoryDepth = 50 + +[NodePool] +SyncThreshold = 10 + +[Transactions] +MaxQueued = 500 \ No newline at end of file diff --git a/ccip/config/evm/Unichain_Testnet.toml b/ccip/config/evm/Unichain_Testnet.toml new file mode 100644 index 00000000000..5e18f0d4716 --- /dev/null +++ b/ccip/config/evm/Unichain_Testnet.toml @@ -0,0 +1,26 @@ +ChainID = '1301' +# OP stack: https://docs.unichain.org/docs/getting-started/set-up-a-node#overview +ChainType = 'optimismBedrock' +# finality_depth was: ~1900 +FinalityDepth = 2000 +# block_time was: ~1s, adding 1 second buffer +LogPollInterval = '2s' + +# batching_size_finalization_percentage = 30% according to the explorer batching view +# ( batching_size_finalization_percentage * finality_depth) * block_time / 60 secs = ~10 min (finality time) +# After running soak tests using 10m threw issues as there are batchs that take 35m, so we are bumping it to 45m to be sure +NoNewFinalizedHeadsThreshold = '45m' + +FinalityTagEnabled = true + +[GasEstimator] +EIP1559DynamicFees = true +Mode = 'FeeHistory' + +[GasEstimator.FeeHistory] +# block_time was: 1s, per recommendation skip 1-2 blocks +CacheTimeout = '2s' + +[GasEstimator.BlockHistory] +# As we see blocks containing between ~[8-12]tx, to get about ~1000 tx to check we would need to rougly go 100 tx back +BlockHistorySize = 100 diff --git a/ccip/config/evm/WeMix_Mainnet.toml b/ccip/config/evm/WeMix_Mainnet.toml index be7c278f692..a4e742d7300 100644 --- a/ccip/config/evm/WeMix_Mainnet.toml +++ b/ccip/config/evm/WeMix_Mainnet.toml @@ -16,4 +16,6 @@ EIP1559DynamicFees = true TipCapDefault = '100 gwei' [HeadTracker] -PersistenceEnabled = false +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false diff --git a/ccip/config/evm/WeMix_Testnet.toml b/ccip/config/evm/WeMix_Testnet.toml index 4591fc4c572..bfb75f158e3 100644 --- a/ccip/config/evm/WeMix_Testnet.toml +++ b/ccip/config/evm/WeMix_Testnet.toml @@ -16,5 +16,6 @@ EIP1559DynamicFees = true TipCapDefault = '100 gwei' [HeadTracker] +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 FinalityTagBypass = false -PersistenceEnabled = false diff --git a/ccip/config/evm/Worldchain_Mainnet.toml b/ccip/config/evm/Worldchain_Mainnet.toml new file mode 100644 index 00000000000..9b25d89d98c --- /dev/null +++ b/ccip/config/evm/Worldchain_Mainnet.toml @@ -0,0 +1,23 @@ +ChainID = '480' +# OP stack: https://worldcoin.notion.site/World-Chain-Developer-Preview-Guide-23c94a67683f4e71986e5303ab88c9f3 +ChainType = 'optimismBedrock' +# finality_depth was: ~2400 +FinalityDepth = 2500 +# block_time was: 2s, adding 1 second buffer +LogPollInterval = '3s' + +# finality_depth * block_time / 60 secs = ~83 min (finality time) +NoNewFinalizedHeadsThreshold = '90m' + +FinalityTagEnabled = true + +[GasEstimator] +EIP1559DynamicFees = 
true +Mode = 'FeeHistory' + +[GasEstimator.FeeHistory] +# block_time was: 2s, per recommendation skip 1-2 blocks +CacheTimeout = '4s' + +[GasEstimator.BlockHistory] +BlockHistorySize = 100 diff --git a/ccip/config/evm/Worldchain_Testnet.toml b/ccip/config/evm/Worldchain_Testnet.toml new file mode 100644 index 00000000000..01618322285 --- /dev/null +++ b/ccip/config/evm/Worldchain_Testnet.toml @@ -0,0 +1,23 @@ +ChainID = '4801' +# OP stack: https://worldcoin.notion.site/World-Chain-Developer-Preview-Guide-23c94a67683f4e71986e5303ab88c9f3 +ChainType = 'optimismBedrock' +# finality_depth was: ~2400 +FinalityDepth = 2500 +# block_time was: 2s, adding 1 second buffer +LogPollInterval = '3s' + +# finality_depth * block_time / 60 secs = ~83 min (finality time) +NoNewFinalizedHeadsThreshold = '90m' + +FinalityTagEnabled = true + +[GasEstimator] +EIP1559DynamicFees = true +Mode = 'FeeHistory' + +[GasEstimator.FeeHistory] +# block_time was: 2s, per recommendation skip 1-2 blocks +CacheTimeout = '4s' + +[GasEstimator.BlockHistory] +BlockHistorySize = 100 diff --git a/ccip/config/evm/XLayer_Mainnet.toml b/ccip/config/evm/XLayer_Mainnet.toml index a39a9231ae2..28f7819276c 100644 --- a/ccip/config/evm/XLayer_Mainnet.toml +++ b/ccip/config/evm/XLayer_Mainnet.toml @@ -1,6 +1,6 @@ ChainID = '196' ChainType = 'xlayer' -FinalityDepth = 500 +FinalityDepth = 1000 NoNewHeadsThreshold = '6m' MinIncomingConfirmations = 1 LogPollInterval = '30s' diff --git a/ccip/config/evm/XLayer_Sepolia.toml b/ccip/config/evm/XLayer_Sepolia.toml index 2aa6e58469b..163d909542e 100644 --- a/ccip/config/evm/XLayer_Sepolia.toml +++ b/ccip/config/evm/XLayer_Sepolia.toml @@ -23,6 +23,9 @@ BlockHistorySize = 12 [HeadTracker] HistoryDepth = 2000 +# FinalityDepth < 1k => FinalityTagBypass = false +# https://smartcontract-it.atlassian.net/browse/SHIP-4078 +FinalityTagBypass = false [Transactions.AutoPurge] Enabled = true diff --git a/ccip/config/evm/fallback.toml b/ccip/config/evm/fallback.toml new file mode 100644 index 00000000000..c1f963a33ff --- /dev/null +++ b/ccip/config/evm/fallback.toml @@ -0,0 +1,95 @@ +AutoCreateKey = true +BlockBackfillDepth = 10 +BlockBackfillSkip = false +FinalityDepth = 50 +FinalityTagEnabled = false +LogBackfillBatchSize = 1000 +LogPollInterval = '15s' +LogKeepBlocksDepth = 100000 +# CCIP uses paging when removing logs to avoid pushing too much pressure on the database +LogPrunePageSize = 10000 +BackupLogPollerBlockDelay = 100 +MinContractPayment = '.00001 link' +MinIncomingConfirmations = 3 +NonceAutoSync = true +NoNewHeadsThreshold = '3m' +RPCDefaultBatchSize = 250 +RPCBlockQueryDelay = 1 +FinalizedBlockOffset = 0 +NoNewFinalizedHeadsThreshold = '0' +LogBroadcasterEnabled = true + +[Transactions] +ForwardersEnabled = false +MaxInFlight = 16 +MaxQueued = 250 +ReaperInterval = '1h' +ReaperThreshold = '168h' +ResendAfterThreshold = '1m' + +[Transactions.AutoPurge] +Enabled = false + +[BalanceMonitor] +Enabled = true + +[GasEstimator] +Mode = 'BlockHistory' +PriceDefault = '20 gwei' +PriceMax = '115792089237316195423570985008687907853269984665.640564039457584007913129639935 tether' +PriceMin = '1 gwei' +LimitDefault = 8_000_000 +LimitMax = 8_000_000 +LimitMultiplier = '1' +LimitTransfer = 21_000 +BumpMin = '5 gwei' +BumpPercent = 20 +BumpThreshold = 3 +EIP1559DynamicFees = false +FeeCapDefault = '100 gwei' +TipCapDefault = '1' +TipCapMin = '1' +EstimateLimit = false + +[GasEstimator.BlockHistory] +BatchSize = 25 +BlockHistorySize = 8 +CheckInclusionBlocks = 12 +CheckInclusionPercentile = 90 
+TransactionPercentile = 60
+
+[GasEstimator.FeeHistory]
+CacheTimeout = '10s'
+
+[HeadTracker]
+HistoryDepth = 100
+MaxBufferSize = 3
+SamplingInterval = '1s'
+FinalityTagBypass = true
+MaxAllowedFinalityDepth = 10000
+
+[NodePool]
+PollFailureThreshold = 5
+PollInterval = '10s'
+SelectionMode = 'HighestHead'
+SyncThreshold = 5
+LeaseDuration = '0s'
+NodeIsSyncingEnabled = false
+FinalizedBlockPollInterval = '5s'
+EnforceRepeatableRead = false
+DeathDeclarationDelay = '10s'
+NewHeadsPollInterval = '0s'
+
+[OCR]
+ContractConfirmations = 4
+ContractTransmitterTransmitTimeout = '10s'
+DatabaseTimeout = '10s'
+DeltaCOverride = '168h'
+DeltaCJitterOverride = '1h'
+ObservationGracePeriod = '1s'
+
+[OCR2.Automation]
+GasLimit = 5400000
+
+[Workflow]
+GasLimitDefault = 400_000
diff --git a/ccip/config/evm/zkSync_Mainnet.toml b/ccip/config/evm/zkSync_Mainnet.toml
index a8910a37e4a..a29098690b4 100644
--- a/ccip/config/evm/zkSync_Mainnet.toml
+++ b/ccip/config/evm/zkSync_Mainnet.toml
@@ -28,4 +28,4 @@ OracleType = 'zksync'
 
 [HeadTracker]
 # tracks top N blocks to keep in heads database table. Should store atleast the same # of blocks as finalityDepth
-HistoryDepth = 1500
\ No newline at end of file
+HistoryDepth = 1500
diff --git a/ccip/config/evm/zkSync_Sepolia.toml b/ccip/config/evm/zkSync_Sepolia.toml
index 6eb4ba4137e..36b0c9282da 100644
--- a/ccip/config/evm/zkSync_Sepolia.toml
+++ b/ccip/config/evm/zkSync_Sepolia.toml
@@ -1,23 +1,23 @@
 ChainID = '300'
 ChainType = 'zksync'
 # 200block ~ 20min concurrent with the l1_committed tag
-FinalityDepth = 200 
+FinalityDepth = 200
 # block rate is ~2-5sec, so this ensures blocks are polled correctly
 LogPollInterval = '5s'
 # sufficient time for RPC to be labelled out of sync, since blockRate is pretty fast
 NoNewHeadsThreshold = '1m'
 
 [GasEstimator]
-# no EIP1559 to ensure our estimator doesnot estimate gas with MaxPriorityFee which will break minFunding requirement 
-EIP1559DynamicFees = false 
-# high LimitDefault for worst case pubdata bytes with BatchGasLimit reduced to 4M in OCR2Config 
+# no EIP1559 to ensure our estimator doesnot estimate gas with MaxPriorityFee which will break minFunding requirement
+EIP1559DynamicFees = false
+# high LimitDefault for worst case pubdata bytes with BatchGasLimit reduced to 4M in OCR2Config
 LimitDefault = 2_500_000_000
 FeeCapDefault = '500 mwei'
 PriceDefault = '25 mwei'
 # p999 value for gasPrice based on historical data
 PriceMax = '500 mwei'
 # avg gasPrices are at 0.025 gwei
-PriceMin = '25 mwei' 
+PriceMin = '25 mwei'
 
 [GasEstimator.BlockHistory]
 # increasing this to smooth out gas estimation
@@ -28,4 +28,7 @@ OracleType = 'zksync'
 
 [HeadTracker]
 # tracks top N blocks to keep in heads database table. Should store atleast the same # of blocks as finalityDepth
-HistoryDepth = 250
\ No newline at end of file
+HistoryDepth = 250
+# FinalityDepth < 1k => FinalityTagBypass = false
+# https://smartcontract-it.atlassian.net/browse/SHIP-4078
+FinalityTagBypass = false

From 7e3b16945f901b18e9f6104ffc7c0a2897f6b646 Mon Sep 17 00:00:00 2001
From: yashnevatia
Date: Wed, 8 Jan 2025 22:30:38 +0000
Subject: [PATCH 8/8] updates

---
 deployment/ccip/changeset/solana_state.go | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/deployment/ccip/changeset/solana_state.go b/deployment/ccip/changeset/solana_state.go
index 6c5f774f3ac..8f92a3a79a2 100644
--- a/deployment/ccip/changeset/solana_state.go
+++ b/deployment/ccip/changeset/solana_state.go
@@ -5,6 +5,7 @@ import (
 	"fmt"
 
 	"github.com/gagliardetto/solana-go"
+
 	"github.com/smartcontractkit/chainlink/deployment"
 )
@@ -26,18 +27,16 @@ func LoadOnchainStateSolana(e deployment.Environment) (CCIPOnChainState, error)
 		addresses, err := e.ExistingAddresses.AddressesForChain(chainSelector)
 		if err != nil {
 			// Chain not found in address book, initialize empty
-			if errors.Is(err, deployment.ErrChainNotFound) {
-				addresses = make(map[string]deployment.TypeAndVersion)
-			} else {
+			if !errors.Is(err, deployment.ErrChainNotFound) {
 				return state, err
 			}
+			addresses = make(map[string]deployment.TypeAndVersion)
 		}
 		chainState, err := LoadChainStateSolana(chain, addresses)
 		if err != nil {
 			return state, err
 		}
 		state.SolChains[chainSelector] = chainState
-
 	}
 	return state, nil
 }
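Note on the final hunk above: the refactor flips the error handling into a guard clause, so only the "chain not found" sentinel is treated as recoverable (the loop continues with an empty address map) while any other error returns immediately. The sketch below is a minimal, self-contained illustration of that shape; the sentinel, types, and helper names are illustrative stand-ins, not the actual deployment package API.

package main

import (
	"errors"
	"fmt"
)

// errChainNotFound is a stand-in for a sentinel such as deployment.ErrChainNotFound.
var errChainNotFound = errors.New("chain not found")

// addressesForChain is a hypothetical lookup that fails with errChainNotFound
// when the chain has no entry yet, and with other errors on real failures.
func addressesForChain(selector uint64) (map[string]string, error) {
	if selector == 0 {
		return nil, errChainNotFound
	}
	return map[string]string{"Router 1.0.0": "0xabc"}, nil
}

// loadChain mirrors the guard-clause shape from the patch: only the
// "not found" sentinel is recoverable; every other error aborts the load.
func loadChain(selector uint64) (map[string]string, error) {
	addresses, err := addressesForChain(selector)
	if err != nil {
		if !errors.Is(err, errChainNotFound) {
			return nil, err
		}
		// Chain not found in the address book: continue with an empty map.
		addresses = make(map[string]string)
	}
	return addresses, nil
}

func main() {
	addrs, err := loadChain(0)
	fmt.Println(len(addrs), err) // 0 <nil>: the sentinel yields empty state, not a failure
}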