simplify things

Signed-off-by: Austin Abro <AustinAbro321@gmail.com>
pull/13604/head
Austin Abro 9 months ago
parent 28a9183ee3
commit 265442c5eb

@@ -38,7 +38,6 @@ import (
 	apierrors "k8s.io/apimachinery/pkg/api/errors"
 	"k8s.io/apimachinery/pkg/runtime"
 	"sigs.k8s.io/cli-utils/pkg/kstatus/watcher"
-	"sigs.k8s.io/controller-runtime/pkg/client/apiutil"

 	multierror "github.com/hashicorp/go-multierror"
 	"k8s.io/apimachinery/pkg/api/meta"
@@ -52,7 +51,6 @@ import (
 	"k8s.io/apimachinery/pkg/watch"
 	"k8s.io/cli-runtime/pkg/genericclioptions"
 	"k8s.io/cli-runtime/pkg/resource"
-	"k8s.io/client-go/dynamic"
 	"k8s.io/client-go/kubernetes"
 	"k8s.io/client-go/kubernetes/scheme"
 	"k8s.io/client-go/rest"
@@ -102,26 +100,11 @@ func init() {
 }

 func getStatusWatcher(factory Factory) (watcher.StatusWatcher, error) {
-	cfg, err := factory.ToRESTConfig()
+	dynamicClient, err := factory.DynamicClient()
 	if err != nil {
 		return nil, err
 	}
-	// factory.DynamicClient() may be a better choice here
-	dynamicClient, err := dynamic.NewForConfig(cfg)
-	if err != nil {
-		return nil, err
-	}
-	// Not sure if I should use factory methods to get this http client or I should do this
-	// For example, I could likely use this as well, but it seems like I should use the factory methods instead
-	// httpClient, err := rest.HTTPClientFor(cfg)
-	// if err != nil {
-	// 	return err
-	// }
-	client, err := factory.RESTClient()
-	if err != nil {
-		return nil, err
-	}
-	restMapper, err := apiutil.NewDynamicRESTMapper(cfg, client.Client)
+	restMapper, err := factory.ToRESTMapper()
 	if err != nil {
 		return nil, err
 	}
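For readability, here is how the simplified helper reads once the hunk above is applied. This is a sketch only: the hunk stops before the watcher is constructed, so the trailing call to watcher.NewDefaultStatusWatcher and the return are an assumption, not part of the commit.

// Sketch of getStatusWatcher after this change. The factory accessors come
// from the diff above; the NewDefaultStatusWatcher call is assumed.
func getStatusWatcher(factory Factory) (watcher.StatusWatcher, error) {
	dynamicClient, err := factory.DynamicClient()
	if err != nil {
		return nil, err
	}
	restMapper, err := factory.ToRESTMapper()
	if err != nil {
		return nil, err
	}
	// Assumption: the default kstatus watcher is built from the two factory-provided clients.
	return watcher.NewDefaultStatusWatcher(dynamicClient, restMapper), nil
}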
@@ -141,9 +124,9 @@ func New(getter genericclioptions.RESTClientGetter, waiter Waiter) (*Client, err
 		return nil, err
 	}
 	waiter = &kstatusWaiter{
-		sw:            sw,
-		log:           nopLogger,
-		pausedAsReady: true}
+		sw:  sw,
+		log: nopLogger,
+	}
 	}
 	return &Client{
 		Factory: factory,

@ -34,9 +34,8 @@ import (
) )
type kstatusWaiter struct { type kstatusWaiter struct {
sw watcher.StatusWatcher sw watcher.StatusWatcher
log func(string, ...interface{}) log func(string, ...interface{})
pausedAsReady bool
} }
func (w *kstatusWaiter) Wait(resourceList ResourceList, timeout time.Duration) error { func (w *kstatusWaiter) Wait(resourceList ResourceList, timeout time.Duration) error {
@@ -62,7 +61,7 @@ func (w *kstatusWaiter) wait(ctx context.Context, resourceList ResourceList, wai
 				continue
 			}
 		case *appsv1.Deployment:
-			if w.pausedAsReady && value.Spec.Paused {
+			if value.Spec.Paused {
 				continue
 			}
 		}
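With the pausedAsReady field removed, paused Deployments are always skipped while waiting rather than only when the flag was set. A minimal self-contained sketch of that check follows; skipWait is a hypothetical helper for illustration, not a function introduced by this commit.

package kube

import (
	appsv1 "k8s.io/api/apps/v1"
	"k8s.io/apimachinery/pkg/runtime"
)

// skipWait reports whether the waiter should skip an object instead of
// waiting for it to become ready; paused Deployments never progress, so
// they are treated as ready unconditionally.
func skipWait(obj runtime.Object) bool {
	if d, ok := obj.(*appsv1.Deployment); ok {
		return d.Spec.Paused
	}
	return false
}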

@@ -183,9 +183,8 @@ func TestKWaitJob(t *testing.T) {
 				assert.NoError(t, err)
 			}
 			kwaiter := kstatusWaiter{
-				sw:            statusWatcher,
-				log:           log.Printf,
-				pausedAsReady: tt.pausedAsReady,
+				sw:  statusWatcher,
+				log: log.Printf,
 			}

 			resourceList := ResourceList{}
@@ -195,8 +194,8 @@
 			resourceList = append(resourceList, list...)
 			}
 			ctx, cancel := context.WithTimeout(context.Background(), time.Second*3)
 			defer cancel()
 			err := kwaiter.wait(ctx, resourceList, tt.waitForJobs)
 			if tt.expectErrs != nil {
