Add retry executor component #72
Merged · 2 commits · Jul 28, 2024
107 changes: 107 additions & 0 deletions http/retryexecutor/retryexecutor.go
@@ -0,0 +1,107 @@
package retryexecutor

import (
	"context"
	"errors"
	"fmt"
	"time"

	"github.com/jfrog/gofrog/log"
)

type ExecutionHandlerFunc func() (shouldRetry bool, err error)

type RetryExecutor struct {
	// Context used to check for cancellation between attempts. May be nil.
	Context context.Context

	// The maximum number of retries to perform.
	MaxRetries int

	// Number of milliseconds to sleep between retries.
	RetriesIntervalMilliSecs int

	// Message to display when retrying.
	ErrorMessage string

	// Prefix to print at the beginning of each log.
	LogMsgPrefix string

	// ExecutionHandler is the operation to run with retries.
	ExecutionHandler ExecutionHandlerFunc
}

func (runner *RetryExecutor) Execute() error {
	var err error
	var shouldRetry bool
	for i := 0; i <= runner.MaxRetries; i++ {
		// Run ExecutionHandler.
		shouldRetry, err = runner.ExecutionHandler()

		// If we should not retry, return.
		if !shouldRetry {
			return err
		}
		if cancelledErr := runner.checkCancelled(); cancelledErr != nil {
			return cancelledErr
		}

		// Print a retry log message.
		runner.LogRetry(i, err)

		// Sleep for RetriesIntervalMilliSecs between attempts, except after the last one.
		if runner.RetriesIntervalMilliSecs > 0 && i < runner.MaxRetries {
			time.Sleep(time.Millisecond * time.Duration(runner.RetriesIntervalMilliSecs))
		}
	}
	// If the handler returned an error, log the timeout message and return that error.
	// Otherwise, return a new TimeoutError.
	if err != nil {
		log.Info(runner.getTimeoutErrorMsg())
		return err
	}
	return TimeoutError{runner.getTimeoutErrorMsg()}
}

// TimeoutError is returned when the executor exhausts all retries and the
// execution handler did not return any other error.
type TimeoutError struct {
	errMsg string
}

func (retryErr TimeoutError) Error() string {
	return retryErr.errMsg
}

func (runner *RetryExecutor) getTimeoutErrorMsg() string {
	prefix := ""
	if runner.LogMsgPrefix != "" {
		prefix = runner.LogMsgPrefix + " "
	}
	return fmt.Sprintf("%sexecutor timeout after %v attempts with %v milliseconds wait intervals", prefix, runner.MaxRetries, runner.RetriesIntervalMilliSecs)
}

func (runner *RetryExecutor) LogRetry(attemptNumber int, err error) {
	message := fmt.Sprintf("%s(Attempt %v)", runner.LogMsgPrefix, attemptNumber+1)
	if runner.ErrorMessage != "" {
		message = fmt.Sprintf("%s - %s", message, runner.ErrorMessage)
	}
	if err != nil {
		message = fmt.Sprintf("%s: %s", message, err.Error())
	}

	if err != nil || runner.ErrorMessage != "" {
		log.Warn(message)
	} else {
		log.Debug(message)
	}
}

func (runner *RetryExecutor) checkCancelled() error {
	if runner.Context == nil {
		return nil
	}
	contextErr := runner.Context.Err()
	if errors.Is(contextErr, context.Canceled) {
		log.Info("Retry executor was cancelled")
		return contextErr
	}
	return nil
}
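For reviewers, a minimal usage sketch of the new component, not part of this diff. fetchStatus is a hypothetical readiness probe; the import path follows the file's location in this repository. The TimeoutError path fires when the handler keeps requesting retries without ever returning an error.

package main

import (
	"context"
	"errors"
	"fmt"

	"github.com/jfrog/gofrog/http/retryexecutor"
)

// fetchStatus is a hypothetical stand-in for a flaky readiness check.
func fetchStatus() (ready bool, err error) {
	return false, nil // pretend the resource is never ready
}

func main() {
	executor := retryexecutor.RetryExecutor{
		Context:                  context.Background(),
		MaxRetries:               3,
		RetriesIntervalMilliSecs: 500,
		ErrorMessage:             "resource is not ready yet",
		LogMsgPrefix:             "[status]",
		ExecutionHandler: func() (bool, error) {
			ready, err := fetchStatus()
			if err != nil {
				return false, err // hard failure: stop retrying and surface the error
			}
			return !ready, nil // keep retrying until the resource is ready
		},
	}

	err := executor.Execute()
	var timeoutErr retryexecutor.TimeoutError
	switch {
	case errors.As(err, &timeoutErr):
		fmt.Println("gave up waiting:", err)
	case err != nil:
		fmt.Println("hard failure:", err)
	default:
		fmt.Println("resource is ready")
	}
}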
90 changes: 90 additions & 0 deletions http/retryexecutor/retryexecutor_test.go
@@ -0,0 +1,90 @@
package retryexecutor

import (
	"context"
	"errors"
	"testing"

	"github.com/jfrog/gofrog/log"
	"github.com/stretchr/testify/assert"
)

func TestRetryExecutorSuccess(t *testing.T) {
	retriesToPerform := 10
	breakRetriesAt := 4
	runCount := 0
	executor := RetryExecutor{
		MaxRetries:               retriesToPerform,
		RetriesIntervalMilliSecs: 0,
		ErrorMessage:             "Testing RetryExecutor",
		ExecutionHandler: func() (bool, error) {
			runCount++
			if runCount == breakRetriesAt {
				log.Warn("Breaking after", runCount-1, "retries")
				return false, nil
			}
			return true, nil
		},
	}

	assert.NoError(t, executor.Execute())
	assert.Equal(t, breakRetriesAt, runCount)
}

func TestRetryExecutorTimeoutWithDefaultError(t *testing.T) {
	retriesToPerform := 5
	runCount := 0

	executor := RetryExecutor{
		MaxRetries:               retriesToPerform,
		RetriesIntervalMilliSecs: 0,
		ErrorMessage:             "Testing RetryExecutor",
		ExecutionHandler: func() (bool, error) {
			runCount++
			return true, nil
		},
	}

	assert.Equal(t, TimeoutError{executor.getTimeoutErrorMsg()}, executor.Execute())
	assert.Equal(t, retriesToPerform+1, runCount)
}

func TestRetryExecutorTimeoutWithCustomError(t *testing.T) {
	retriesToPerform := 5
	runCount := 0

	expectedErr := errors.New("retry failed due to reason")

	executor := RetryExecutor{
		MaxRetries:               retriesToPerform,
		RetriesIntervalMilliSecs: 0,
		ErrorMessage:             "Testing RetryExecutor",
		ExecutionHandler: func() (bool, error) {
			runCount++
			return true, expectedErr
		},
	}

	assert.Equal(t, expectedErr, executor.Execute())
	assert.Equal(t, retriesToPerform+1, runCount)
}

func TestRetryExecutorCancel(t *testing.T) {
	retriesToPerform := 5
	runCount := 0

	retryContext, cancelFunc := context.WithCancel(context.Background())
	executor := RetryExecutor{
		Context:                  retryContext,
		MaxRetries:               retriesToPerform,
		RetriesIntervalMilliSecs: 0,
		ErrorMessage:             "Testing RetryExecutor",
		ExecutionHandler: func() (bool, error) {
			runCount++
			return true, nil
		},
	}

	cancelFunc()
	assert.EqualError(t, executor.Execute(), context.Canceled.Error())
	assert.Equal(t, 1, runCount)
}
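The test above only exercises a pre-cancelled context. Below is a sketch, not part of this diff, of cancelling a running executor from another goroutine. One caveat worth noting for review: checkCancelled matches only context.Canceled, so a context.WithTimeout deadline (context.DeadlineExceeded) would not stop the loop early; cancellation has to come from a CancelFunc, or the handler itself must observe the context.

package main

import (
	"context"
	"fmt"
	"time"

	"github.com/jfrog/gofrog/http/retryexecutor"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	executor := retryexecutor.RetryExecutor{
		Context:                  ctx,
		MaxRetries:               100,
		RetriesIntervalMilliSecs: 100,
		ExecutionHandler: func() (bool, error) {
			return true, nil // always ask for another attempt
		},
	}

	// Cancel from another goroutine while Execute is looping.
	go func() {
		time.Sleep(300 * time.Millisecond)
		cancel()
	}()

	// Execute returns context.Canceled at the first cancellation
	// check after the in-flight attempt completes.
	fmt.Println(executor.Execute()) // prints: context canceled
}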