Get controller.go test coverage to 100% #9

Merged
merged 1 commit into from
Jun 8, 2018
31 changes: 22 additions & 9 deletions pkg/controller/controller.go
@@ -102,6 +102,13 @@ type controller struct {
// mu is used to synchronize controller setup
mu sync.Mutex

// jitterPeriod allows tests to reduce the jitterPeriod so they complete faster
jitterPeriod time.Duration

// waitForCache allows tests to mock out the waitForCache function to return an error
// defaults to cache.WaitForCacheSync
waitForCache func(stopCh <-chan struct{}, cacheSyncs ...cache.InformerSynced) bool

// TODO(pwittrock): Consider initializing a logger with the controller name as the tag
}
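
The two fields added here are test seams: one shortens the worker re-launch period, the other lets a test swap out cache.WaitForCacheSync. Below is a rough same-package sketch (not code from this PR) of how a test might use the second seam to reach the otherwise-unreachable error branch in Start; the test name and the bare-struct construction are illustrative assumptions.

```go
package controller

import (
	"testing"

	"k8s.io/client-go/tools/cache"
)

// Hypothetical sketch: with waitForCache injectable, a test can force the
// cache-sync failure path directly.
func TestStartFailsWhenCachesNeverSync(t *testing.T) {
	// Bare-struct construction is simplified for illustration; the real tests
	// presumably build the controller through the package's normal setup.
	c := &controller{name: "test-controller"}

	// Stub that claims the caches never synced.
	c.waitForCache = func(stopCh <-chan struct{}, cacheSyncs ...cache.InformerSynced) bool {
		return false
	}

	stop := make(chan struct{})
	defer close(stop)

	if err := c.Start(stop); err == nil {
		t.Fatal("expected Start to return an error when caches never sync")
	}
}
```
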

@@ -145,21 +152,30 @@ func (c *controller) Start(stop <-chan struct{}) error {
for _, informer := range allInformers {
syncedFuncs = append(syncedFuncs, informer.HasSynced)
}
if ok := cache.WaitForCacheSync(stop, syncedFuncs...); !ok {

if c.waitForCache == nil {
c.waitForCache = cache.WaitForCacheSync
}
if ok := c.waitForCache(stop, syncedFuncs...); !ok {
// This code is unreachable right now since WaitForCacheSync will never return an error
// Leaving it here because that could happen in the future
err := fmt.Errorf("failed to wait for %s caches to sync", c.name)
log.Error(err, "Could not wait for Cache to sync", "controller", c.name)
return err
}

if c.jitterPeriod == 0 {
c.jitterPeriod = time.Second
}

// Launch two workers to process resources
log.Info("Starting workers", "controller", c.name, "WorkerCount", c.maxConcurrentReconciles)
for i := 0; i < c.maxConcurrentReconciles; i++ {
// Continually process work items
// Process work items
go wait.Until(func() {
// TODO(pwittrock): Should we really use wait.Until to continuously restart this if it exits?
for c.processNextWorkItem() {
}
}, time.Second, stop)
}, c.jitterPeriod, stop)
}

<-stop
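
For context, wait.Until from k8s.io/apimachinery re-runs the supplied function every period until the stop channel closes, so the period handed to it is exactly what the new jitterPeriod field controls. A small standalone sketch (values made up) of that behavior, showing why a short period lets tests iterate quickly:

```go
package main

import (
	"fmt"
	"time"

	"k8s.io/apimachinery/pkg/util/wait"
)

func main() {
	stop := make(chan struct{})

	// wait.Until re-invokes the function every period until stop is closed,
	// which is why Start can hand it a tight worker loop.
	go wait.Until(func() {
		fmt.Println("processing work items at", time.Now().Format(time.StampMilli))
	}, 10*time.Millisecond, stop) // a small period here plays the role of jitterPeriod in tests

	time.Sleep(50 * time.Millisecond)
	close(stop)
}
```
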
@@ -174,13 +190,12 @@ func (c *controller) processNextWorkItem() bool {

obj, shutdown := c.queue.Get()
if obj == nil {
log.Error(nil, "Encountered nil Request", "Object", obj)
// Sometimes the queue gives us nil items when it starts up
c.queue.Forget(obj)
}

if shutdown {
// Return false, this will cause the controller to back off for a second before trying again.
// TODO(community): This was copied from the sample-controller. Figure out why / if we need this.
// Stop working
return false
}

@@ -210,8 +225,6 @@ func (c *controller) processNextWorkItem() bool {
c.queue.AddRateLimited(req)
log.Error(nil, "reconcile error", "controller", c.name, "Request", req)

// Return false, this will cause the controller to back off for a second before trying again.
// TODO(community): This was copied from the sample-controller. Figure out why / if we need this.
return false
} else if result.Requeue {
c.queue.AddRateLimited(req)
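
The requeue logic above sits on top of client-go's rate-limiting workqueue. A standalone sketch of that Get/Done/AddRateLimited/Forget lifecycle (the queue name and item key are made up):

```go
package main

import (
	"fmt"

	"k8s.io/client-go/util/workqueue"
)

func main() {
	queue := workqueue.NewNamedRateLimitingQueue(workqueue.DefaultControllerRateLimiter(), "example")

	queue.Add("default/some-object")

	item, shutdown := queue.Get()
	if shutdown {
		return
	}
	// Done must always be called so the queue knows processing finished.
	defer queue.Done(item)

	reconcileFailed := true
	if reconcileFailed {
		// Re-enqueue with backoff, mirroring the error branch in processNextWorkItem.
		queue.AddRateLimited(item)
		fmt.Println("requeued after failure, retries so far:", queue.NumRequeues(item))
		return
	}

	// On success, clear the rate limiter's history for this item.
	queue.Forget(item)
}
```
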
@@ -31,7 +31,7 @@ import (

func TestSource(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecsWithDefaultAndCustomReporters(t, "controller Suite", []Reporter{test.NewlineReporter{}})
RunSpecsWithDefaultAndCustomReporters(t, "Controller Integration Suite", []Reporter{test.NewlineReporter{}})
}

var testenv *test.Environment
@@ -40,7 +40,7 @@ var clientset *kubernetes.Clientset
var icache informer.Informers

var _ = BeforeSuite(func() {
logf.SetLogger(logf.ZapLogger(true))
logf.SetLogger(logf.ZapLogger(false))

testenv = &test.Environment{}
