feat: implement exponential backoff strategy for error handling in extractor and loader processes; enhance retry configuration options

2026-04-12 20:35:29 -05:00
parent 5633dc98d0
commit f126d5bbd0
7 changed files with 122 additions and 23 deletions


@@ -5,12 +5,13 @@ import (
     "fmt"
     "sync"

+    "git.ksdemosapps.com/kylesoda/go-migrate/internal/app/config"
     "git.ksdemosapps.com/kylesoda/go-migrate/internal/app/models"
 )

 type LoaderError struct {
-    models.Batch
-    Msg string
+    Batch models.Batch
+    Msg   string
 }

 func (e *LoaderError) Error() string {
@@ -19,7 +20,7 @@ func (e *LoaderError) Error() string {
 func LoaderErrorHandler(
     ctx context.Context,
-    maxRetryAttempts int,
+    retryConfig config.RetryConfig,
     chErrorsIn <-chan LoaderError,
     chBatchesOut chan<- models.Batch,
     chJobErrorsOut chan<- JobError,
@@ -39,11 +40,11 @@ func LoaderErrorHandler(
                 return
             }

-            if err.RetryCounter >= maxRetryAttempts {
+            if err.Batch.RetryCounter >= retryConfig.Attempts {
                 wgActiveBatches.Done()

                 jobError := JobError{
                     ShouldCancelJob: false,
-                    Msg:             fmt.Sprintf("Batch %v reached max retries (%d)", err.Id, maxRetryAttempts),
+                    Msg:             fmt.Sprintf("Batch %v reached max retries (%d)", err.Batch.Id, retryConfig.Attempts),
                     Prev:            &err,
                 }
@@ -68,13 +69,21 @@ func LoaderErrorHandler(
                 }
             }

-            err.RetryCounter++
+            err.Batch.RetryCounter++
+            delay := computeBackoffDelay(
+                err.Batch.RetryCounter,
+                retryConfig.BaseDelayMs,
+                retryConfig.MaxDelayMs,
+                retryConfig.MaxJitterMs,
+            )

-            select {
-            case chBatchesOut <- err.Batch:
-            case <-ctx.Done():
-                return
-            }
+            requeueWithBackoff(ctx, delay, func() {
+                select {
+                case chBatchesOut <- err.Batch:
+                case <-ctx.Done():
+                    return
+                }
+            })
         }
     }
 }
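The bodies of computeBackoffDelay and requeueWithBackoff are not part of this diff, and neither is the config.RetryConfig definition. A minimal sketch of what they could look like, assuming capped exponential doubling with uniform jitter and plain-int millisecond fields; only the field names, parameter order, and call-site signatures are taken from the diff, everything else is an assumption:

package pipeline // hypothetical package name; the diff does not show it

import (
    "context"
    "math/rand"
    "time"
)

// RetryConfig is a stand-in for config.RetryConfig; the field names
// match the diff, the int-millisecond types are an assumption.
type RetryConfig struct {
    Attempts    int
    BaseDelayMs int
    MaxDelayMs  int
    MaxJitterMs int
}

// computeBackoffDelay doubles the base delay per attempt
// (base * 2^(n-1)), caps it at maxDelayMs, and adds uniform jitter
// so simultaneous failures don't retry in lockstep.
func computeBackoffDelay(retryCounter, baseDelayMs, maxDelayMs, maxJitterMs int) time.Duration {
    delayMs := baseDelayMs << (retryCounter - 1) // retryCounter is >= 1 at the call site
    if delayMs <= 0 || delayMs > maxDelayMs {    // <= 0 catches shift overflow
        delayMs = maxDelayMs
    }
    if maxJitterMs > 0 {
        delayMs += rand.Intn(maxJitterMs + 1) // add [0, maxJitterMs] ms of jitter
    }
    return time.Duration(delayMs) * time.Millisecond
}

// requeueWithBackoff waits out the delay on its own goroutine, then
// runs the requeue callback; context cancellation abandons the requeue.
func requeueWithBackoff(ctx context.Context, delay time.Duration, requeue func()) {
    go func() {
        timer := time.NewTimer(delay)
        defer timer.Stop()
        select {
        case <-timer.C:
            requeue()
        case <-ctx.Done():
        }
    }()
}

If the real requeueWithBackoff likewise runs the wait on a goroutine, the handler loop stays free to drain chErrorsIn while a failed batch sits out its delay, rather than stalling every other retry behind the slowest one.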