Fixing logic that can lead to a nil error being returned and the retry stopping early.
This commit is contained in:
parent 6ee8f1454e
commit d323c20896

2 changed files with 38 additions and 3 deletions
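Note: the root cause is Go's short variable declaration operator. Inside the retry loop in ExponentialBackoff, "ok, err := condition()" declared fresh, loop-scoped variables, so the last error returned by condition() was unreachable once the loop exited. The following is a minimal, self-contained sketch of the before/after behavior; the condition stub, loop bound, and error text are illustrative only, not the realis code:

package main

import (
	"errors"
	"fmt"
)

// condition stands in for the retried operation; it always fails here.
func condition() (bool, error) {
	return false, errors.New("connection refused")
}

func main() {
	var err error

	for i := 0; i < 3; i++ {
		// Before the fix: ':=' declares a fresh, loop-scoped 'err' that
		// shadows the outer one, which therefore stays nil.
		ok, err := condition()
		if ok && err == nil {
			break
		}
	}
	fmt.Println(err) // <nil> -- the real failure reason is lost

	for i := 0; i < 3; i++ {
		var ok bool
		// After the fix: '=' assigns to the outer 'err', so the last
		// error survives the loop and can be reported to the caller.
		ok, err = condition()
		if ok {
			break
		}
	}
	fmt.Println(err) // connection refused
}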
@@ -58,6 +58,32 @@ func TestMain(m *testing.M) {
 	os.Exit(m.Run())
 }
 
+func TestBadEndpoint(t *testing.T) {
+
+	// Attempt to connect to a bad endpoint
+	r, err := realis.NewRealisClient(realis.SchedulerUrl("http://192.168.33.7:8081/scheduler/"),
+		realis.TimeoutMS(200),
+		realis.BackOff(&realis.Backoff{ // Reduce penalties for this test to make it quick
+			Steps:    5,
+			Duration: 1 * time.Second,
+			Factor:   1.0,
+			Jitter:   0.1}),
+	)
+	defer r.Close()
+
+	taskQ := &aurora.TaskQuery{
+		Role:        "no",
+		Environment: "task",
+		JobName:     "here",
+	}
+
+	_, err = r.GetTasksWithoutConfigs(taskQ)
+
+	// Check that we do error out of retrying
+	assert.Error(t, err)
+
+}
+
 func TestLeaderFromZK(t *testing.T) {
 	cluster := realis.GetDefaultClusterFromZKUrl("192.168.33.7:2181")
 	url, err := realis.LeaderFromZK(*cluster)
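Note: the new TestBadEndpoint drives the fixed retry path end to end. It points the client at an unreachable scheduler with a 200 ms connection timeout and a deliberately small backoff (5 steps, 1 s duration, Factor 1.0 so the delay never grows, Jitter 0.1), then asserts that GetTasksWithoutConfigs returns a non-nil error once the retries are exhausted. Since the loop sleeps between attempts but not before the first one, that is four ~1 s waits across five attempts, so the test should finish in roughly five seconds in the worst case.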
retry.go (+12 -3)
@@ -17,10 +17,11 @@ limitations under the License.
 package realis
 
 import (
-	"errors"
 	"time"
 
 	"math/rand"
+
+	"github.com/pkg/errors"
 )
 
 // Jitter returns a time.Duration between duration and duration + maxFactor *
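Note: this import swap trades the standard library's errors package for github.com/pkg/errors, which provides the same errors.New plus errors.Wrap for annotating an error while preserving its original cause. The final hunk below depends on Wrap.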
@@ -52,6 +53,8 @@ type ConditionFunc func() (done bool, err error)
 // If the condition never returns true, ErrWaitTimeout is returned. All other
 // errors terminate immediately.
 func ExponentialBackoff(backoff Backoff, condition ConditionFunc) error {
+	var err error
+	var ok bool
 	duration := backoff.Duration
 	for i := 0; i < backoff.Steps; i++ {
 		if i != 0 {
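Note: hoisting "var err error" and "var ok bool" above the loop is half of the fix; it gives the function a place where the last condition error can survive the loop. On its own it would still be shadowed by the ':=' inside the loop body, which the next hunk changes.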
@@ -63,7 +66,7 @@ func ExponentialBackoff(backoff Backoff, condition ConditionFunc) error {
 			duration = time.Duration(float64(duration) * backoff.Factor)
 		}
 
-		ok, err := condition()
+		ok, err = condition()
 
 		// If the function executed says it succeeded, stop retrying
 		if ok {
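Note: changing ':=' to '=' completes the fix. The call now assigns into the hoisted variables instead of declaring new loop-scoped ones, as shown in the standalone shadowing sketch near the top of this page.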
@@ -78,5 +81,11 @@ func ExponentialBackoff(backoff Backoff, condition ConditionFunc) error {
 		}
 
 	}
-	return NewTimeoutError(errors.New("Timed out while retrying"))
+
+	// Provide more information to the user wherever possible
+	if err != nil {
+		return NewTimeoutError(errors.Wrap(err, "Timed out while retrying"))
+	} else {
+		return NewTimeoutError(errors.New("Timed out while retrying"))
+	}
 }
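Note: with err preserved, the timeout path can finally report why the retries failed instead of always returning a bare "Timed out while retrying". Below is a minimal sketch of what github.com/pkg/errors adds here; the cause string is made up for illustration, and realis.NewTimeoutError is omitted to keep the example self-contained:

package main

import (
	"fmt"

	"github.com/pkg/errors"
)

func main() {
	// A stand-in for the last error returned by the retried condition.
	cause := errors.New("dial tcp 192.168.33.7:8081: i/o timeout")

	// errors.Wrap annotates the error with context while keeping the
	// original recoverable, unlike constructing a brand-new error.
	wrapped := errors.Wrap(cause, "Timed out while retrying")

	fmt.Println(wrapped)               // Timed out while retrying: dial tcp 192.168.33.7:8081: i/o timeout
	fmt.Println(errors.Cause(wrapped)) // dial tcp 192.168.33.7:8081: i/o timeout
}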