Result: FAILURE
Tests: 1 failed / 2898 succeeded
Started: 2019-10-09 05:33
Elapsed: 35m4s
Revision:
Builder: gke-prow-ssd-pool-1a225945-qlhl
links: {u'resultstore': {u'url': u'https://source.cloud.google.com/results/invocations/d179d9a6-9269-4dd7-ab40-9e007accb696/targets/test'}}
pod: 3981d887-ea56-11e9-bb6b-9a8df04ecf6a
resultstore: https://source.cloud.google.com/results/invocations/d179d9a6-9269-4dd7-ab40-9e007accb696/targets/test
infra-commit: 2aa72c872
repo: k8s.io/kubernetes
repo-commit: e62ed95ecd300d0dc56d5d19dcbae93e3794d1bc
repos: {u'k8s.io/kubernetes': u'master'}

Test Failures


k8s.io/kubernetes/test/integration/scheduler TestNodePIDPressure 34s

go test -v k8s.io/kubernetes/test/integration/scheduler -run TestNodePIDPressure$
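The command above is the local reproduction for this failure. A minimal sketch of running it, assuming a checked-out k8s.io/kubernetes working tree and an etcd binary on the PATH (the test framework starts its own apiserver against http://127.0.0.1:2379, as the log below shows); the repo location and etcd install step here are illustrative assumptions, not taken from this job:

    # sketch only: paths and etcd setup are assumptions, not from this job
    cd $GOPATH/src/k8s.io/kubernetes
    hack/install-etcd.sh                      # installs etcd under third_party/etcd
    export PATH="$PWD/third_party/etcd:$PATH"
    go test -v k8s.io/kubernetes/test/integration/scheduler -run 'TestNodePIDPressure$'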
=== RUN   TestNodePIDPressure
W1009 06:02:50.962140  108297 services.go:35] No CIDR for service cluster IPs specified. Default value which was 10.0.0.0/24 is deprecated and will be removed in future releases. Please specify it using --service-cluster-ip-range on kube-apiserver.
I1009 06:02:50.962417  108297 services.go:47] Setting service IP to "10.0.0.1" (read-write).
I1009 06:02:50.962530  108297 master.go:305] Node port range unspecified. Defaulting to 30000-32767.
I1009 06:02:50.962634  108297 master.go:261] Using reconciler: 
I1009 06:02:50.967537  108297 storage_factory.go:285] storing podtemplates in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:50.968136  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:50.968349  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:50.971583  108297 reflector.go:185] Listing and watching *core.PodTemplate from storage/cacher.go:/podtemplates
I1009 06:02:50.973320  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:50.971527  108297 store.go:1342] Monitoring podtemplates count at <storage-prefix>//podtemplates
I1009 06:02:50.977194  108297 storage_factory.go:285] storing events in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:50.977989  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:50.978452  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:50.983458  108297 store.go:1342] Monitoring events count at <storage-prefix>//events
I1009 06:02:50.983553  108297 reflector.go:185] Listing and watching *core.Event from storage/cacher.go:/events
I1009 06:02:50.986118  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:50.987113  108297 storage_factory.go:285] storing limitranges in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:50.987670  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:50.987825  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:50.989655  108297 store.go:1342] Monitoring limitranges count at <storage-prefix>//limitranges
I1009 06:02:50.989884  108297 reflector.go:185] Listing and watching *core.LimitRange from storage/cacher.go:/limitranges
I1009 06:02:50.992809  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:50.995044  108297 storage_factory.go:285] storing resourcequotas in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:50.995567  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:50.995706  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:50.998144  108297 store.go:1342] Monitoring resourcequotas count at <storage-prefix>//resourcequotas
I1009 06:02:50.998383  108297 reflector.go:185] Listing and watching *core.ResourceQuota from storage/cacher.go:/resourcequotas
I1009 06:02:51.000193  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
E1009 06:02:51.001836  108297 event_broadcaster.go:247] Unable to write event: 'Post http://127.0.0.1:39047/apis/events.k8s.io/v1beta1/namespaces/permit-pluginff91ade3-86ba-4b97-889a-c15170d4c419/events: dial tcp 127.0.0.1:39047: connect: connection refused' (may retry after sleeping)
I1009 06:02:51.005444  108297 storage_factory.go:285] storing secrets in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.006118  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.006304  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.009899  108297 store.go:1342] Monitoring secrets count at <storage-prefix>//secrets
I1009 06:02:51.009971  108297 reflector.go:185] Listing and watching *core.Secret from storage/cacher.go:/secrets
I1009 06:02:51.010548  108297 storage_factory.go:285] storing persistentvolumes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.011152  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.011340  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.012407  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.014303  108297 store.go:1342] Monitoring persistentvolumes count at <storage-prefix>//persistentvolumes
I1009 06:02:51.014714  108297 reflector.go:185] Listing and watching *core.PersistentVolume from storage/cacher.go:/persistentvolumes
I1009 06:02:51.016381  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.017939  108297 storage_factory.go:285] storing persistentvolumeclaims in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.018375  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.018540  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.027052  108297 store.go:1342] Monitoring persistentvolumeclaims count at <storage-prefix>//persistentvolumeclaims
I1009 06:02:51.027175  108297 reflector.go:185] Listing and watching *core.PersistentVolumeClaim from storage/cacher.go:/persistentvolumeclaims
I1009 06:02:51.027850  108297 storage_factory.go:285] storing configmaps in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.028073  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.028101  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.030255  108297 store.go:1342] Monitoring configmaps count at <storage-prefix>//configmaps
I1009 06:02:51.030490  108297 reflector.go:185] Listing and watching *core.ConfigMap from storage/cacher.go:/configmaps
I1009 06:02:51.031114  108297 storage_factory.go:285] storing namespaces in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.031360  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.031369  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.031394  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.032924  108297 store.go:1342] Monitoring namespaces count at <storage-prefix>//namespaces
I1009 06:02:51.033232  108297 storage_factory.go:285] storing endpoints in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.033424  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.033448  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.033486  108297 reflector.go:185] Listing and watching *core.Namespace from storage/cacher.go:/namespaces
I1009 06:02:51.034105  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.035654  108297 store.go:1342] Monitoring endpoints count at <storage-prefix>//services/endpoints
I1009 06:02:51.036060  108297 storage_factory.go:285] storing nodes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.036262  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.036297  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.036541  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.036589  108297 reflector.go:185] Listing and watching *core.Endpoints from storage/cacher.go:/services/endpoints
I1009 06:02:51.038283  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.038386  108297 store.go:1342] Monitoring nodes count at <storage-prefix>//minions
I1009 06:02:51.038657  108297 reflector.go:185] Listing and watching *core.Node from storage/cacher.go:/minions
I1009 06:02:51.038790  108297 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.038994  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.039027  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.040077  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.040580  108297 store.go:1342] Monitoring pods count at <storage-prefix>//pods
I1009 06:02:51.041046  108297 reflector.go:185] Listing and watching *core.Pod from storage/cacher.go:/pods
I1009 06:02:51.041889  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.042581  108297 storage_factory.go:285] storing serviceaccounts in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.043506  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.043546  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.044713  108297 store.go:1342] Monitoring serviceaccounts count at <storage-prefix>//serviceaccounts
I1009 06:02:51.045037  108297 storage_factory.go:285] storing services in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.045283  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.045308  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.045458  108297 reflector.go:185] Listing and watching *core.ServiceAccount from storage/cacher.go:/serviceaccounts
I1009 06:02:51.046924  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.048658  108297 store.go:1342] Monitoring services count at <storage-prefix>//services/specs
I1009 06:02:51.048787  108297 storage_factory.go:285] storing services in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.048890  108297 reflector.go:185] Listing and watching *core.Service from storage/cacher.go:/services/specs
I1009 06:02:51.049073  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.049113  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.051241  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.053160  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.053207  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.054761  108297 storage_factory.go:285] storing replicationcontrollers in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.055270  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.055428  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.057219  108297 store.go:1342] Monitoring replicationcontrollers count at <storage-prefix>//controllers
I1009 06:02:51.057402  108297 rest.go:115] the default service ipfamily for this cluster is: IPv4
I1009 06:02:51.058416  108297 storage_factory.go:285] storing bindings in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.059056  108297 storage_factory.go:285] storing componentstatuses in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.058681  108297 reflector.go:185] Listing and watching *core.ReplicationController from storage/cacher.go:/controllers
I1009 06:02:51.061213  108297 storage_factory.go:285] storing configmaps in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.062056  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.063272  108297 storage_factory.go:285] storing endpoints in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.064419  108297 storage_factory.go:285] storing events in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.065542  108297 storage_factory.go:285] storing limitranges in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.066989  108297 storage_factory.go:285] storing namespaces in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.067312  108297 storage_factory.go:285] storing namespaces in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.067756  108297 storage_factory.go:285] storing namespaces in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.068573  108297 storage_factory.go:285] storing nodes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.069555  108297 storage_factory.go:285] storing nodes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.070849  108297 storage_factory.go:285] storing nodes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.072458  108297 storage_factory.go:285] storing persistentvolumeclaims in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.073085  108297 storage_factory.go:285] storing persistentvolumeclaims in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.073970  108297 storage_factory.go:285] storing persistentvolumes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.075139  108297 storage_factory.go:285] storing persistentvolumes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.076400  108297 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.076882  108297 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.077276  108297 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.077654  108297 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.078184  108297 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.078803  108297 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.079220  108297 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.081008  108297 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.081695  108297 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.083914  108297 storage_factory.go:285] storing podtemplates in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.085258  108297 storage_factory.go:285] storing replicationcontrollers in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.085815  108297 storage_factory.go:285] storing replicationcontrollers in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.086469  108297 storage_factory.go:285] storing replicationcontrollers in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.088546  108297 storage_factory.go:285] storing resourcequotas in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.089236  108297 storage_factory.go:285] storing resourcequotas in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.090397  108297 storage_factory.go:285] storing secrets in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.092057  108297 storage_factory.go:285] storing serviceaccounts in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.093157  108297 storage_factory.go:285] storing services in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.094951  108297 storage_factory.go:285] storing services in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.096811  108297 storage_factory.go:285] storing services in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.097220  108297 master.go:453] Skipping disabled API group "auditregistration.k8s.io".
I1009 06:02:51.097385  108297 master.go:464] Enabling API group "authentication.k8s.io".
I1009 06:02:51.097479  108297 master.go:464] Enabling API group "authorization.k8s.io".
I1009 06:02:51.097860  108297 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.098314  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.098443  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.100565  108297 store.go:1342] Monitoring horizontalpodautoscalers.autoscaling count at <storage-prefix>//horizontalpodautoscalers
I1009 06:02:51.100670  108297 reflector.go:185] Listing and watching *autoscaling.HorizontalPodAutoscaler from storage/cacher.go:/horizontalpodautoscalers
I1009 06:02:51.101373  108297 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.101746  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.101946  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.103714  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.106026  108297 store.go:1342] Monitoring horizontalpodautoscalers.autoscaling count at <storage-prefix>//horizontalpodautoscalers
I1009 06:02:51.106052  108297 reflector.go:185] Listing and watching *autoscaling.HorizontalPodAutoscaler from storage/cacher.go:/horizontalpodautoscalers
I1009 06:02:51.106682  108297 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.107052  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.107126  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.108052  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.109533  108297 store.go:1342] Monitoring horizontalpodautoscalers.autoscaling count at <storage-prefix>//horizontalpodautoscalers
I1009 06:02:51.109575  108297 master.go:464] Enabling API group "autoscaling".
I1009 06:02:51.109630  108297 reflector.go:185] Listing and watching *autoscaling.HorizontalPodAutoscaler from storage/cacher.go:/horizontalpodautoscalers
I1009 06:02:51.109963  108297 storage_factory.go:285] storing jobs.batch in batch/v1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.110196  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.110222  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.111699  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.111715  108297 store.go:1342] Monitoring jobs.batch count at <storage-prefix>//jobs
I1009 06:02:51.112032  108297 storage_factory.go:285] storing cronjobs.batch in batch/v1beta1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.112177  108297 reflector.go:185] Listing and watching *batch.Job from storage/cacher.go:/jobs
I1009 06:02:51.112192  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.112211  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.114708  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.114806  108297 store.go:1342] Monitoring cronjobs.batch count at <storage-prefix>//cronjobs
I1009 06:02:51.114960  108297 master.go:464] Enabling API group "batch".
I1009 06:02:51.114839  108297 reflector.go:185] Listing and watching *batch.CronJob from storage/cacher.go:/cronjobs
I1009 06:02:51.115287  108297 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.116012  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.116045  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.116558  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.117648  108297 store.go:1342] Monitoring certificatesigningrequests.certificates.k8s.io count at <storage-prefix>//certificatesigningrequests
I1009 06:02:51.117702  108297 master.go:464] Enabling API group "certificates.k8s.io".
I1009 06:02:51.118011  108297 storage_factory.go:285] storing leases.coordination.k8s.io in coordination.k8s.io/v1beta1, reading as coordination.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.118113  108297 reflector.go:185] Listing and watching *certificates.CertificateSigningRequest from storage/cacher.go:/certificatesigningrequests
I1009 06:02:51.118183  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.118205  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.120204  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.121174  108297 store.go:1342] Monitoring leases.coordination.k8s.io count at <storage-prefix>//leases
I1009 06:02:51.121237  108297 reflector.go:185] Listing and watching *coordination.Lease from storage/cacher.go:/leases
I1009 06:02:51.121496  108297 storage_factory.go:285] storing leases.coordination.k8s.io in coordination.k8s.io/v1beta1, reading as coordination.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.121919  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.122034  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.122846  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.124986  108297 store.go:1342] Monitoring leases.coordination.k8s.io count at <storage-prefix>//leases
I1009 06:02:51.125150  108297 master.go:464] Enabling API group "coordination.k8s.io".
I1009 06:02:51.125241  108297 master.go:453] Skipping disabled API group "discovery.k8s.io".
I1009 06:02:51.125712  108297 storage_factory.go:285] storing ingresses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.126187  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.125077  108297 reflector.go:185] Listing and watching *coordination.Lease from storage/cacher.go:/leases
I1009 06:02:51.126313  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.128353  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.128796  108297 store.go:1342] Monitoring ingresses.networking.k8s.io count at <storage-prefix>//ingress
I1009 06:02:51.129024  108297 master.go:464] Enabling API group "extensions".
I1009 06:02:51.129431  108297 storage_factory.go:285] storing networkpolicies.networking.k8s.io in networking.k8s.io/v1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.128907  108297 reflector.go:185] Listing and watching *networking.Ingress from storage/cacher.go:/ingress
I1009 06:02:51.129671  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.129825  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.130823  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.131120  108297 store.go:1342] Monitoring networkpolicies.networking.k8s.io count at <storage-prefix>//networkpolicies
I1009 06:02:51.131294  108297 reflector.go:185] Listing and watching *networking.NetworkPolicy from storage/cacher.go:/networkpolicies
I1009 06:02:51.131487  108297 storage_factory.go:285] storing ingresses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.132550  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.132720  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.134007  108297 store.go:1342] Monitoring ingresses.networking.k8s.io count at <storage-prefix>//ingress
I1009 06:02:51.134043  108297 master.go:464] Enabling API group "networking.k8s.io".
I1009 06:02:51.134116  108297 storage_factory.go:285] storing runtimeclasses.node.k8s.io in node.k8s.io/v1beta1, reading as node.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.134170  108297 reflector.go:185] Listing and watching *networking.Ingress from storage/cacher.go:/ingress
I1009 06:02:51.134295  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.134316  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.135592  108297 store.go:1342] Monitoring runtimeclasses.node.k8s.io count at <storage-prefix>//runtimeclasses
I1009 06:02:51.135618  108297 master.go:464] Enabling API group "node.k8s.io".
I1009 06:02:51.135895  108297 reflector.go:185] Listing and watching *node.RuntimeClass from storage/cacher.go:/runtimeclasses
I1009 06:02:51.136012  108297 storage_factory.go:285] storing poddisruptionbudgets.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.136198  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.136224  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.137604  108297 store.go:1342] Monitoring poddisruptionbudgets.policy count at <storage-prefix>//poddisruptionbudgets
I1009 06:02:51.137807  108297 reflector.go:185] Listing and watching *policy.PodDisruptionBudget from storage/cacher.go:/poddisruptionbudgets
I1009 06:02:51.137899  108297 storage_factory.go:285] storing podsecuritypolicies.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.138052  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.138085  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.139992  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.140354  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.142647  108297 store.go:1342] Monitoring podsecuritypolicies.policy count at <storage-prefix>//podsecuritypolicy
I1009 06:02:51.142803  108297 master.go:464] Enabling API group "policy".
I1009 06:02:51.142705  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.142955  108297 reflector.go:185] Listing and watching *policy.PodSecurityPolicy from storage/cacher.go:/podsecuritypolicy
I1009 06:02:51.142928  108297 storage_factory.go:285] storing roles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.143387  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.143464  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.143995  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.145697  108297 store.go:1342] Monitoring roles.rbac.authorization.k8s.io count at <storage-prefix>//roles
I1009 06:02:51.146826  108297 reflector.go:185] Listing and watching *rbac.Role from storage/cacher.go:/roles
I1009 06:02:51.147829  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.148392  108297 storage_factory.go:285] storing rolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.148914  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.148949  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.150425  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.151762  108297 store.go:1342] Monitoring rolebindings.rbac.authorization.k8s.io count at <storage-prefix>//rolebindings
I1009 06:02:51.151932  108297 reflector.go:185] Listing and watching *rbac.RoleBinding from storage/cacher.go:/rolebindings
I1009 06:02:51.152070  108297 storage_factory.go:285] storing clusterroles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.152381  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.153125  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.153325  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.155096  108297 store.go:1342] Monitoring clusterroles.rbac.authorization.k8s.io count at <storage-prefix>//clusterroles
I1009 06:02:51.155142  108297 reflector.go:185] Listing and watching *rbac.ClusterRole from storage/cacher.go:/clusterroles
I1009 06:02:51.155609  108297 storage_factory.go:285] storing clusterrolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.156190  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.156223  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.157532  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.158913  108297 store.go:1342] Monitoring clusterrolebindings.rbac.authorization.k8s.io count at <storage-prefix>//clusterrolebindings
I1009 06:02:51.159019  108297 reflector.go:185] Listing and watching *rbac.ClusterRoleBinding from storage/cacher.go:/clusterrolebindings
I1009 06:02:51.159038  108297 storage_factory.go:285] storing roles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.159236  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.159274  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.160368  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.162403  108297 store.go:1342] Monitoring roles.rbac.authorization.k8s.io count at <storage-prefix>//roles
I1009 06:02:51.162568  108297 reflector.go:185] Listing and watching *rbac.Role from storage/cacher.go:/roles
I1009 06:02:51.163556  108297 storage_factory.go:285] storing rolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.163992  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.164114  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.164857  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.166171  108297 store.go:1342] Monitoring rolebindings.rbac.authorization.k8s.io count at <storage-prefix>//rolebindings
I1009 06:02:51.166230  108297 reflector.go:185] Listing and watching *rbac.RoleBinding from storage/cacher.go:/rolebindings
I1009 06:02:51.166688  108297 storage_factory.go:285] storing clusterroles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.167116  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.167194  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.169012  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.170970  108297 store.go:1342] Monitoring clusterroles.rbac.authorization.k8s.io count at <storage-prefix>//clusterroles
I1009 06:02:51.171151  108297 reflector.go:185] Listing and watching *rbac.ClusterRole from storage/cacher.go:/clusterroles
I1009 06:02:51.172989  108297 storage_factory.go:285] storing clusterrolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.173457  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.173563  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.175595  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.177124  108297 store.go:1342] Monitoring clusterrolebindings.rbac.authorization.k8s.io count at <storage-prefix>//clusterrolebindings
I1009 06:02:51.177199  108297 master.go:464] Enabling API group "rbac.authorization.k8s.io".
I1009 06:02:51.177434  108297 reflector.go:185] Listing and watching *rbac.ClusterRoleBinding from storage/cacher.go:/clusterrolebindings
I1009 06:02:51.180877  108297 storage_factory.go:285] storing priorityclasses.scheduling.k8s.io in scheduling.k8s.io/v1, reading as scheduling.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.181130  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.181168  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.183095  108297 store.go:1342] Monitoring priorityclasses.scheduling.k8s.io count at <storage-prefix>//priorityclasses
I1009 06:02:51.183552  108297 reflector.go:185] Listing and watching *scheduling.PriorityClass from storage/cacher.go:/priorityclasses
I1009 06:02:51.184194  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.184819  108297 storage_factory.go:285] storing priorityclasses.scheduling.k8s.io in scheduling.k8s.io/v1, reading as scheduling.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.185251  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.185618  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.186772  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.187444  108297 store.go:1342] Monitoring priorityclasses.scheduling.k8s.io count at <storage-prefix>//priorityclasses
I1009 06:02:51.187476  108297 master.go:464] Enabling API group "scheduling.k8s.io".
I1009 06:02:51.187494  108297 reflector.go:185] Listing and watching *scheduling.PriorityClass from storage/cacher.go:/priorityclasses
I1009 06:02:51.187642  108297 master.go:453] Skipping disabled API group "settings.k8s.io".
I1009 06:02:51.188486  108297 storage_factory.go:285] storing storageclasses.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.188647  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.188670  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.190472  108297 store.go:1342] Monitoring storageclasses.storage.k8s.io count at <storage-prefix>//storageclasses
I1009 06:02:51.190661  108297 reflector.go:185] Listing and watching *storage.StorageClass from storage/cacher.go:/storageclasses
I1009 06:02:51.190825  108297 storage_factory.go:285] storing volumeattachments.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.191232  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.191271  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.193792  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.194330  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.195368  108297 store.go:1342] Monitoring volumeattachments.storage.k8s.io count at <storage-prefix>//volumeattachments
I1009 06:02:51.195480  108297 reflector.go:185] Listing and watching *storage.VolumeAttachment from storage/cacher.go:/volumeattachments
I1009 06:02:51.196399  108297 storage_factory.go:285] storing csinodes.storage.k8s.io in storage.k8s.io/v1beta1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.197153  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.198693  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.199378  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.202641  108297 store.go:1342] Monitoring csinodes.storage.k8s.io count at <storage-prefix>//csinodes
I1009 06:02:51.202741  108297 reflector.go:185] Listing and watching *storage.CSINode from storage/cacher.go:/csinodes
I1009 06:02:51.202771  108297 storage_factory.go:285] storing csidrivers.storage.k8s.io in storage.k8s.io/v1beta1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.202975  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.203019  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.204262  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.205020  108297 store.go:1342] Monitoring csidrivers.storage.k8s.io count at <storage-prefix>//csidrivers
I1009 06:02:51.205407  108297 storage_factory.go:285] storing storageclasses.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.205568  108297 reflector.go:185] Listing and watching *storage.CSIDriver from storage/cacher.go:/csidrivers
I1009 06:02:51.205682  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.206153  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.207856  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.209596  108297 store.go:1342] Monitoring storageclasses.storage.k8s.io count at <storage-prefix>//storageclasses
I1009 06:02:51.209964  108297 reflector.go:185] Listing and watching *storage.StorageClass from storage/cacher.go:/storageclasses
I1009 06:02:51.210573  108297 storage_factory.go:285] storing volumeattachments.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.210879  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.211001  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.211870  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.242680  108297 store.go:1342] Monitoring volumeattachments.storage.k8s.io count at <storage-prefix>//volumeattachments
I1009 06:02:51.245231  108297 master.go:464] Enabling API group "storage.k8s.io".
I1009 06:02:51.246657  108297 reflector.go:185] Listing and watching *storage.VolumeAttachment from storage/cacher.go:/volumeattachments
I1009 06:02:51.246693  108297 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.282912  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.288679  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.288789  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.321025  108297 store.go:1342] Monitoring deployments.apps count at <storage-prefix>//deployments
I1009 06:02:51.327087  108297 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.322826  108297 reflector.go:185] Listing and watching *apps.Deployment from storage/cacher.go:/deployments
I1009 06:02:51.342352  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.380831  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.380925  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.397914  108297 store.go:1342] Monitoring statefulsets.apps count at <storage-prefix>//statefulsets
I1009 06:02:51.398426  108297 storage_factory.go:285] storing daemonsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.399523  108297 reflector.go:185] Listing and watching *apps.StatefulSet from storage/cacher.go:/statefulsets
I1009 06:02:51.401455  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.415482  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.415585  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.419830  108297 store.go:1342] Monitoring daemonsets.apps count at <storage-prefix>//daemonsets
I1009 06:02:51.420004  108297 reflector.go:185] Listing and watching *apps.DaemonSet from storage/cacher.go:/daemonsets
I1009 06:02:51.420266  108297 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.420610  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.420662  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.421988  108297 store.go:1342] Monitoring replicasets.apps count at <storage-prefix>//replicasets
I1009 06:02:51.422454  108297 storage_factory.go:285] storing controllerrevisions.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.422651  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.422717  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.423173  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.423179  108297 reflector.go:185] Listing and watching *apps.ReplicaSet from storage/cacher.go:/replicasets
I1009 06:02:51.425898  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.425975  108297 store.go:1342] Monitoring controllerrevisions.apps count at <storage-prefix>//controllerrevisions
I1009 06:02:51.426007  108297 master.go:464] Enabling API group "apps".
I1009 06:02:51.426952  108297 reflector.go:185] Listing and watching *apps.ControllerRevision from storage/cacher.go:/controllerrevisions
I1009 06:02:51.430058  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.433533  108297 storage_factory.go:285] storing validatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.434047  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.434113  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.435127  108297 store.go:1342] Monitoring validatingwebhookconfigurations.admissionregistration.k8s.io count at <storage-prefix>//validatingwebhookconfigurations
I1009 06:02:51.435179  108297 reflector.go:185] Listing and watching *admissionregistration.ValidatingWebhookConfiguration from storage/cacher.go:/validatingwebhookconfigurations
I1009 06:02:51.436593  108297 storage_factory.go:285] storing mutatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.457585  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.457785  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.457842  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.460589  108297 store.go:1342] Monitoring mutatingwebhookconfigurations.admissionregistration.k8s.io count at <storage-prefix>//mutatingwebhookconfigurations
I1009 06:02:51.460890  108297 reflector.go:185] Listing and watching *admissionregistration.MutatingWebhookConfiguration from storage/cacher.go:/mutatingwebhookconfigurations
I1009 06:02:51.460949  108297 storage_factory.go:285] storing validatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.461268  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.461304  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.462412  108297 store.go:1342] Monitoring validatingwebhookconfigurations.admissionregistration.k8s.io count at <storage-prefix>//validatingwebhookconfigurations
I1009 06:02:51.462531  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.462528  108297 storage_factory.go:285] storing mutatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.462780  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.462820  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.462977  108297 reflector.go:185] Listing and watching *admissionregistration.ValidatingWebhookConfiguration from storage/cacher.go:/validatingwebhookconfigurations
I1009 06:02:51.464857  108297 store.go:1342] Monitoring mutatingwebhookconfigurations.admissionregistration.k8s.io count at <storage-prefix>//mutatingwebhookconfigurations
I1009 06:02:51.464895  108297 master.go:464] Enabling API group "admissionregistration.k8s.io".
I1009 06:02:51.465007  108297 storage_factory.go:285] storing events in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.465513  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.465561  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.465765  108297 reflector.go:185] Listing and watching *admissionregistration.MutatingWebhookConfiguration from storage/cacher.go:/mutatingwebhookconfigurations
I1009 06:02:51.466056  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.468716  108297 store.go:1342] Monitoring events count at <storage-prefix>//events
I1009 06:02:51.468788  108297 master.go:464] Enabling API group "events.k8s.io".
I1009 06:02:51.468902  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.469331  108297 storage_factory.go:285] storing tokenreviews.authentication.k8s.io in authentication.k8s.io/v1, reading as authentication.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.469387  108297 reflector.go:185] Listing and watching *core.Event from storage/cacher.go:/events
I1009 06:02:51.470425  108297 storage_factory.go:285] storing tokenreviews.authentication.k8s.io in authentication.k8s.io/v1, reading as authentication.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.470931  108297 watch_cache.go:451] Replace watchCache (rev: 30241) 
I1009 06:02:51.496493  108297 storage_factory.go:285] storing localsubjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.496896  108297 storage_factory.go:285] storing selfsubjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.497315  108297 storage_factory.go:285] storing selfsubjectrulesreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.497713  108297 storage_factory.go:285] storing subjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.498196  108297 storage_factory.go:285] storing localsubjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.498474  108297 storage_factory.go:285] storing selfsubjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.498828  108297 storage_factory.go:285] storing selfsubjectrulesreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.499096  108297 storage_factory.go:285] storing subjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.520423  108297 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.534530  108297 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.537119  108297 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.537895  108297 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.554441  108297 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.555297  108297 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.565307  108297 storage_factory.go:285] storing jobs.batch in batch/v1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.566807  108297 storage_factory.go:285] storing jobs.batch in batch/v1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.569481  108297 storage_factory.go:285] storing cronjobs.batch in batch/v1beta1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.570271  108297 storage_factory.go:285] storing cronjobs.batch in batch/v1beta1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
W1009 06:02:51.570445  108297 genericapiserver.go:404] Skipping API batch/v2alpha1 because it has no resources.
I1009 06:02:51.573133  108297 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.573519  108297 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.574336  108297 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.576624  108297 storage_factory.go:285] storing leases.coordination.k8s.io in coordination.k8s.io/v1beta1, reading as coordination.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.580854  108297 storage_factory.go:285] storing leases.coordination.k8s.io in coordination.k8s.io/v1beta1, reading as coordination.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.582485  108297 storage_factory.go:285] storing ingresses.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.583107  108297 storage_factory.go:285] storing ingresses.extensions in extensions/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.584545  108297 storage_factory.go:285] storing networkpolicies.networking.k8s.io in networking.k8s.io/v1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.629901  108297 storage_factory.go:285] storing ingresses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.663548  108297 storage_factory.go:285] storing ingresses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.684397  108297 storage_factory.go:285] storing runtimeclasses.node.k8s.io in node.k8s.io/v1beta1, reading as node.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
W1009 06:02:51.684547  108297 genericapiserver.go:404] Skipping API node.k8s.io/v1alpha1 because it has no resources.
I1009 06:02:51.685649  108297 storage_factory.go:285] storing poddisruptionbudgets.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.686162  108297 storage_factory.go:285] storing poddisruptionbudgets.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.686766  108297 storage_factory.go:285] storing podsecuritypolicies.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.687663  108297 storage_factory.go:285] storing clusterrolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.688214  108297 storage_factory.go:285] storing clusterroles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.690254  108297 storage_factory.go:285] storing rolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.691021  108297 storage_factory.go:285] storing roles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.691689  108297 storage_factory.go:285] storing clusterrolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.692275  108297 storage_factory.go:285] storing clusterroles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.693043  108297 storage_factory.go:285] storing rolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.703095  108297 storage_factory.go:285] storing roles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
W1009 06:02:51.703216  108297 genericapiserver.go:404] Skipping API rbac.authorization.k8s.io/v1alpha1 because it has no resources.
I1009 06:02:51.706094  108297 storage_factory.go:285] storing priorityclasses.scheduling.k8s.io in scheduling.k8s.io/v1, reading as scheduling.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.708461  108297 storage_factory.go:285] storing priorityclasses.scheduling.k8s.io in scheduling.k8s.io/v1, reading as scheduling.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
W1009 06:02:51.708584  108297 genericapiserver.go:404] Skipping API scheduling.k8s.io/v1alpha1 because it has no resources.
I1009 06:02:51.711582  108297 storage_factory.go:285] storing storageclasses.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.712244  108297 storage_factory.go:285] storing volumeattachments.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.712522  108297 storage_factory.go:285] storing volumeattachments.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.716624  108297 storage_factory.go:285] storing csidrivers.storage.k8s.io in storage.k8s.io/v1beta1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.717436  108297 storage_factory.go:285] storing csinodes.storage.k8s.io in storage.k8s.io/v1beta1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.718264  108297 storage_factory.go:285] storing storageclasses.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.719650  108297 storage_factory.go:285] storing volumeattachments.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
W1009 06:02:51.719756  108297 genericapiserver.go:404] Skipping API storage.k8s.io/v1alpha1 because it has no resources.
I1009 06:02:51.722192  108297 storage_factory.go:285] storing controllerrevisions.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.723098  108297 storage_factory.go:285] storing daemonsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.723422  108297 storage_factory.go:285] storing daemonsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.724210  108297 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.724498  108297 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.724805  108297 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.725571  108297 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.725919  108297 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.726224  108297 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.727140  108297 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.727445  108297 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.727766  108297 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
W1009 06:02:51.727857  108297 genericapiserver.go:404] Skipping API apps/v1beta2 because it has no resources.
W1009 06:02:51.727865  108297 genericapiserver.go:404] Skipping API apps/v1beta1 because it has no resources.
I1009 06:02:51.728648  108297 storage_factory.go:285] storing mutatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.738213  108297 storage_factory.go:285] storing validatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.740211  108297 storage_factory.go:285] storing mutatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.741048  108297 storage_factory.go:285] storing validatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.742146  108297 storage_factory.go:285] storing events.events.k8s.io in events.k8s.io/v1beta1, reading as events.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"3584e5ce-ed8c-415e-9367-7f1eb9a7e185", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", CAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000}
I1009 06:02:51.749996  108297 httplog.go:90] GET /api/v1/namespaces/default/endpoints/kubernetes: (3.002123ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:51.751761  108297 healthz.go:177] healthz check etcd failed: etcd client connection not yet established
I1009 06:02:51.751786  108297 healthz.go:177] healthz check poststarthook/bootstrap-controller failed: not finished
I1009 06:02:51.751807  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:51.751820  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:51.751830  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:51.751839  108297 healthz.go:191] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[-]poststarthook/bootstrap-controller failed: reason withheld
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:51.751880  108297 httplog.go:90] GET /healthz: (260.883µs) 0 [Go-http-client/1.1 127.0.0.1:39682]
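(Annotation, not part of the captured log.) The bracketed per-check lines above are the apiserver's /healthz breakdown while its post-start hooks are still finishing. A minimal sketch of fetching that same breakdown over HTTP, assuming a hypothetical insecure test listener at 127.0.0.1:8080 (the real address of this test apiserver is not recorded in the log):

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Hypothetical address; adjust to wherever the test apiserver listens.
	const baseURL = "http://127.0.0.1:8080"

	// /healthz?verbose returns the same per-check lines seen above
	// ([+]ping ok, [-]etcd failed: reason withheld, ...).
	resp, err := http.Get(baseURL + "/healthz?verbose")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Printf("status=%d\n%s", resp.StatusCode, body)
}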
I1009 06:02:51.754675  108297 httplog.go:90] GET /api/v1/services: (1.549548ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:51.760543  108297 httplog.go:90] GET /api/v1/services: (1.470043ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:51.763149  108297 healthz.go:177] healthz check etcd failed: etcd client connection not yet established
I1009 06:02:51.763190  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:51.763205  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:51.763216  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:51.763226  108297 healthz.go:191] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:51.763256  108297 httplog.go:90] GET /healthz: (222.323µs) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:51.764762  108297 httplog.go:90] GET /api/v1/namespaces/kube-system: (1.761828ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39680]
I1009 06:02:51.767154  108297 httplog.go:90] POST /api/v1/namespaces: (1.838167ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39680]
I1009 06:02:51.770445  108297 httplog.go:90] GET /api/v1/services: (1.202176ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39684]
I1009 06:02:51.770975  108297 httplog.go:90] GET /api/v1/services: (6.43352ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:51.772923  108297 httplog.go:90] GET /api/v1/namespaces/kube-public: (5.28284ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39680]
I1009 06:02:51.775600  108297 httplog.go:90] POST /api/v1/namespaces: (1.992374ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:51.777062  108297 httplog.go:90] GET /api/v1/namespaces/kube-node-lease: (1.056459ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:51.780224  108297 httplog.go:90] POST /api/v1/namespaces: (2.287568ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:51.853041  108297 healthz.go:177] healthz check etcd failed: etcd client connection not yet established
I1009 06:02:51.853104  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:51.853121  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:51.853133  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:51.853154  108297 healthz.go:191] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:51.853206  108297 httplog.go:90] GET /healthz: (442.07µs) 0 [Go-http-client/1.1 127.0.0.1:39682]
I1009 06:02:51.864231  108297 healthz.go:177] healthz check etcd failed: etcd client connection not yet established
I1009 06:02:51.864272  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:51.864287  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:51.864297  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:51.864308  108297 healthz.go:191] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:51.864351  108297 httplog.go:90] GET /healthz: (330.184µs) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:51.952978  108297 healthz.go:177] healthz check etcd failed: etcd client connection not yet established
I1009 06:02:51.953052  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:51.953072  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:51.953084  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:51.953095  108297 healthz.go:191] [+]ping ok
[+]log ok
[-]etcd failed: reason withheld
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:51.953149  108297 httplog.go:90] GET /healthz: (466.401µs) 0 [Go-http-client/1.1 127.0.0.1:39682]
I1009 06:02:51.962828  108297 client.go:361] parsed scheme: "endpoint"
I1009 06:02:51.962969  108297 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
I1009 06:02:51.965645  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:51.965680  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:51.965693  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:51.965704  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:51.965883  108297 httplog.go:90] GET /healthz: (1.832874ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:52.057609  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.057665  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:52.057679  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:52.057706  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:52.057788  108297 httplog.go:90] GET /healthz: (2.852763ms) 0 [Go-http-client/1.1 127.0.0.1:39682]
I1009 06:02:52.067350  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.067404  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:52.067418  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:52.067428  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:52.067517  108297 httplog.go:90] GET /healthz: (2.611796ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:52.158047  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.158085  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:52.158096  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:52.158105  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:52.158153  108297 httplog.go:90] GET /healthz: (1.491168ms) 0 [Go-http-client/1.1 127.0.0.1:39682]
I1009 06:02:52.165883  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.165921  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:52.165932  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:52.165944  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:52.165997  108297 httplog.go:90] GET /healthz: (2.005637ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:52.255127  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.255173  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:52.255186  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:52.255198  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:52.255261  108297 httplog.go:90] GET /healthz: (2.610465ms) 0 [Go-http-client/1.1 127.0.0.1:39682]
I1009 06:02:52.265972  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.266017  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:52.266028  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:52.266039  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:52.266097  108297 httplog.go:90] GET /healthz: (1.942721ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:52.355445  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.355492  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:52.355504  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:52.355517  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:52.355588  108297 httplog.go:90] GET /healthz: (2.887004ms) 0 [Go-http-client/1.1 127.0.0.1:39682]
I1009 06:02:52.368257  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.368291  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:52.368302  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:52.368312  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:52.368368  108297 httplog.go:90] GET /healthz: (1.931167ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:52.455021  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.455067  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:52.455079  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:52.455090  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:52.455159  108297 httplog.go:90] GET /healthz: (2.363421ms) 0 [Go-http-client/1.1 127.0.0.1:39682]
I1009 06:02:52.466348  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.466401  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:52.466415  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:52.466425  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:52.466483  108297 httplog.go:90] GET /healthz: (2.418101ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:52.554351  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.554395  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:52.554406  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:52.554417  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:52.554473  108297 httplog.go:90] GET /healthz: (1.85227ms) 0 [Go-http-client/1.1 127.0.0.1:39682]
I1009 06:02:52.565713  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.565775  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:52.565788  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:52.565800  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:52.565882  108297 httplog.go:90] GET /healthz: (1.788737ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:52.654884  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.654934  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:52.654945  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:52.654954  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:52.655008  108297 httplog.go:90] GET /healthz: (2.045106ms) 0 [Go-http-client/1.1 127.0.0.1:39682]
I1009 06:02:52.665238  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.665276  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:52.665287  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:52.665296  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:52.665361  108297 httplog.go:90] GET /healthz: (1.394415ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:52.751362  108297 httplog.go:90] GET /apis/scheduling.k8s.io/v1beta1/priorityclasses/system-node-critical: (3.503642ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39684]
I1009 06:02:52.751897  108297 httplog.go:90] GET /api/v1/namespaces/kube-system: (3.537031ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:52.755101  108297 httplog.go:90] POST /apis/scheduling.k8s.io/v1beta1/priorityclasses: (2.918747ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39684]
I1009 06:02:52.755314  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles: (7.788035ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.755612  108297 storage_scheduling.go:139] created PriorityClass system-node-critical with value 2000001000
I1009 06:02:52.756077  108297 httplog.go:90] GET /api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication: (2.234151ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:52.759595  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.759621  108297 healthz.go:177] healthz check poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I1009 06:02:52.759635  108297 healthz.go:177] healthz check poststarthook/ca-registration failed: not finished
I1009 06:02:52.759649  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
[-]poststarthook/ca-registration failed: reason withheld
healthz check failed
I1009 06:02:52.759697  108297 httplog.go:90] GET /healthz: (5.800976ms) 0 [Go-http-client/1.1 127.0.0.1:39704]
I1009 06:02:52.759748  108297 httplog.go:90] GET /apis/scheduling.k8s.io/v1beta1/priorityclasses/system-cluster-critical: (1.587262ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.760066  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (4.276061ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39684]
I1009 06:02:52.763888  108297 httplog.go:90] POST /api/v1/namespaces/kube-system/configmaps: (3.983798ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39682]
I1009 06:02:52.763990  108297 httplog.go:90] POST /apis/scheduling.k8s.io/v1beta1/priorityclasses: (3.436298ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.764134  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:aggregate-to-admin: (3.52579ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39684]
I1009 06:02:52.764227  108297 storage_scheduling.go:139] created PriorityClass system-cluster-critical with value 2000000000
I1009 06:02:52.764244  108297 storage_scheduling.go:148] all system priority classes are created successfully or already exist.
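(Annotation, not part of the captured log.) The two storage_scheduling.go entries above are the built-in priority classes the apiserver bootstraps before its scheduling post-start hook reports healthy. Purely for illustration, the same objects expressed with the k8s.io/api scheduling/v1 types, with the values copied from the log:

package main

import (
	"fmt"

	schedulingv1 "k8s.io/api/scheduling/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	// Reconstructed from the log lines above; these are the objects the
	// scheduling/bootstrap-system-priority-classes hook creates.
	classes := []schedulingv1.PriorityClass{
		{
			ObjectMeta: metav1.ObjectMeta{Name: "system-node-critical"},
			Value:      2000001000,
		},
		{
			ObjectMeta: metav1.ObjectMeta{Name: "system-cluster-critical"},
			Value:      2000000000,
		},
	}
	for _, pc := range classes {
		fmt.Printf("%s=%d\n", pc.Name, pc.Value)
	}
}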
I1009 06:02:52.766977  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.767014  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:52.767050  108297 httplog.go:90] GET /healthz: (1.141705ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.767409  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/admin: (1.977602ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39684]
I1009 06:02:52.769061  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:aggregate-to-edit: (1.223177ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.770412  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/edit: (979.766µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.771910  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:aggregate-to-view: (1.182838ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.773570  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/view: (1.059806ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.775410  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:discovery: (1.379948ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.777567  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/cluster-admin: (1.269614ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.780505  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.330891ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.780718  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/cluster-admin
I1009 06:02:52.782239  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:discovery: (1.299896ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.786628  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (3.07784ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.787177  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:discovery
I1009 06:02:52.789522  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:basic-user: (1.606115ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.794064  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (4.053034ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.794546  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:basic-user
I1009 06:02:52.798665  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:public-info-viewer: (3.92942ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.802842  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (3.727833ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.803425  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:public-info-viewer
I1009 06:02:52.805183  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/admin: (1.387818ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.809291  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (3.303328ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.809687  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/admin
I1009 06:02:52.811780  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/edit: (1.518245ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.814928  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.515223ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.815376  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/edit
I1009 06:02:52.817228  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/view: (1.352492ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.820198  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.331033ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.820651  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/view
I1009 06:02:52.822147  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:aggregate-to-admin: (1.14023ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.826053  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (3.021637ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.826491  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:aggregate-to-admin
I1009 06:02:52.827908  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:aggregate-to-edit: (1.006359ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.831601  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.992694ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.832065  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:aggregate-to-edit
I1009 06:02:52.833903  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:aggregate-to-view: (1.352796ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.837840  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (3.274097ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.838157  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:aggregate-to-view
I1009 06:02:52.839583  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:heapster: (1.164633ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.842543  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.451338ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.842965  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:heapster
I1009 06:02:52.844166  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:node: (979.307µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.848693  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (3.452453ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.849183  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:node
I1009 06:02:52.852262  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:node-problem-detector: (2.53736ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.854380  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.854414  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:52.854459  108297 httplog.go:90] GET /healthz: (1.691496ms) 0 [Go-http-client/1.1 127.0.0.1:39704]
I1009 06:02:52.867765  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.867807  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:52.867885  108297 httplog.go:90] GET /healthz: (4.018931ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:52.874719  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (20.424562ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.875055  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:node-problem-detector
I1009 06:02:52.877238  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:kubelet-api-admin: (1.814293ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.904962  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (26.626491ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.914204  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:kubelet-api-admin
I1009 06:02:52.916676  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:node-bootstrapper: (1.786891ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.920002  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.590175ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.920261  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:node-bootstrapper
I1009 06:02:52.922520  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:auth-delegator: (1.943992ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.927303  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (3.414691ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.927702  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:auth-delegator
I1009 06:02:52.929078  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:kube-aggregator: (1.141637ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.931179  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.749141ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.931377  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:kube-aggregator
I1009 06:02:52.932616  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:kube-controller-manager: (929.227µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.935566  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.361926ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.935949  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:kube-controller-manager
I1009 06:02:52.937131  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:kube-dns: (957.457µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.940370  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.747206ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.940811  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:kube-dns
I1009 06:02:52.942278  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:persistent-volume-provisioner: (1.226883ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.945263  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.419593ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.945589  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:persistent-volume-provisioner
I1009 06:02:52.946941  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:csi-external-attacher: (1.087319ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.949873  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.237497ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.950293  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:csi-external-attacher
I1009 06:02:52.951965  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:certificates.k8s.io:certificatesigningrequests:nodeclient: (1.496567ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.953435  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.953486  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:52.953531  108297 httplog.go:90] GET /healthz: (1.107219ms) 0 [Go-http-client/1.1 127.0.0.1:39702]
I1009 06:02:52.956951  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.613986ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.957197  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:certificates.k8s.io:certificatesigningrequests:nodeclient
I1009 06:02:52.959374  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient: (1.977421ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.963168  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.89719ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:52.965137  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:52.965521  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:52.965808  108297 httplog.go:90] GET /healthz: (1.927212ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:52.966371  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient
I1009 06:02:52.968382  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:volume-scheduler: (1.52915ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:52.971697  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.431748ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:52.972361  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:volume-scheduler
I1009 06:02:52.974109  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:node-proxier: (1.288665ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:52.977680  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.710025ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:52.978207  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:node-proxier
I1009 06:02:52.979850  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:kube-scheduler: (1.333944ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:52.983705  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (3.172039ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:52.984310  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:kube-scheduler
I1009 06:02:52.985899  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:csi-external-provisioner: (1.20105ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:52.989353  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.877079ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:52.989833  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:csi-external-provisioner
I1009 06:02:52.991351  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:attachdetach-controller: (1.12808ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:52.994142  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.112114ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:52.994497  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:attachdetach-controller
I1009 06:02:52.996306  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:clusterrole-aggregation-controller: (1.403449ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:52.998679  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.834765ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:52.999074  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:clusterrole-aggregation-controller
I1009 06:02:53.000336  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:cronjob-controller: (1.05018ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.002696  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.86805ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.003158  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:cronjob-controller
I1009 06:02:53.004914  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:daemon-set-controller: (1.318922ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.008043  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.547863ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.008624  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:daemon-set-controller
I1009 06:02:53.010498  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:deployment-controller: (1.618902ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.013889  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.611492ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.014315  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:deployment-controller
I1009 06:02:53.016272  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:disruption-controller: (1.633454ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.020914  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (3.923237ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.021451  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:disruption-controller
I1009 06:02:53.023394  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:endpoint-controller: (1.564869ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.026544  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.313674ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.026833  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:endpoint-controller
I1009 06:02:53.028532  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:expand-controller: (1.305625ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.031496  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.223365ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.031805  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:expand-controller
I1009 06:02:53.033346  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:generic-garbage-collector: (1.331726ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.036261  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.294296ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.036614  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:generic-garbage-collector
I1009 06:02:53.038242  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:horizontal-pod-autoscaler: (1.130585ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.040927  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.012757ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.041289  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:horizontal-pod-autoscaler
I1009 06:02:53.042590  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:job-controller: (1.083835ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.045365  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.16726ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.045902  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:job-controller
I1009 06:02:53.047444  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:namespace-controller: (1.287841ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.050065  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.087373ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.050259  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:namespace-controller
I1009 06:02:53.051253  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:node-controller: (837.527µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.053160  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.511526ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.053756  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.053785  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.053821  108297 httplog.go:90] GET /healthz: (1.252129ms) 0 [Go-http-client/1.1 127.0.0.1:39702]
I1009 06:02:53.054185  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:node-controller
I1009 06:02:53.055438  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:persistent-volume-binder: (891.696µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.058212  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.233627ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.058587  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:persistent-volume-binder
I1009 06:02:53.059930  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:pod-garbage-collector: (1.114963ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.062173  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.76641ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.062558  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:pod-garbage-collector
I1009 06:02:53.064046  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:replicaset-controller: (1.217056ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.064913  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.065189  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.065425  108297 httplog.go:90] GET /healthz: (1.533351ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.068405  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.760641ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.068916  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:replicaset-controller
I1009 06:02:53.070909  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:replication-controller: (1.546202ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.074760  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (3.140868ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.075181  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:replication-controller
I1009 06:02:53.077208  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:resourcequota-controller: (1.676323ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.080578  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.59297ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.081056  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:resourcequota-controller
I1009 06:02:53.082591  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:route-controller: (1.276261ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.085118  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.064376ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.085761  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:route-controller
I1009 06:02:53.087755  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:service-account-controller: (1.76017ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.090661  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.185393ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.090945  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:service-account-controller
I1009 06:02:53.092546  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:service-controller: (1.424357ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.095506  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.493262ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.095765  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:service-controller
I1009 06:02:53.097403  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:statefulset-controller: (1.421958ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.100563  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.580068ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.100800  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:statefulset-controller
I1009 06:02:53.102151  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:ttl-controller: (1.109806ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.104516  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.782719ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.104711  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:ttl-controller
I1009 06:02:53.106164  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:certificate-controller: (1.274261ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.108637  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.094672ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.108895  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:certificate-controller
I1009 06:02:53.110109  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:pvc-protection-controller: (994.025µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.112344  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (1.901383ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.112690  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:pvc-protection-controller
I1009 06:02:53.114018  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterroles/system:controller:pv-protection-controller: (950.203µs) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.117698  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterroles: (2.85957ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.118128  108297 storage_rbac.go:219] created clusterrole.rbac.authorization.k8s.io/system:controller:pv-protection-controller
I1009 06:02:53.119749  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/cluster-admin: (1.129919ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.122415  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.252905ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.122800  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/cluster-admin
I1009 06:02:53.124073  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:discovery: (1.06111ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.129358  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.213108ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.130073  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:discovery
I1009 06:02:53.149435  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:basic-user: (2.082495ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.153855  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.153889  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.153938  108297 httplog.go:90] GET /healthz: (1.350239ms) 0 [Go-http-client/1.1 127.0.0.1:39702]
I1009 06:02:53.165314  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.165353  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.165397  108297 httplog.go:90] GET /healthz: (1.413981ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.169769  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.589323ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.170019  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:basic-user
I1009 06:02:53.190098  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:public-info-viewer: (2.637573ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.211188  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (3.722475ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.211543  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:public-info-viewer
I1009 06:02:53.231433  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:node-proxier: (3.373076ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.252194  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (4.724412ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.253535  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:node-proxier
I1009 06:02:53.254937  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.254975  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.255026  108297 httplog.go:90] GET /healthz: (2.110025ms) 0 [Go-http-client/1.1 127.0.0.1:39704]
I1009 06:02:53.265565  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.265603  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.265651  108297 httplog.go:90] GET /healthz: (1.684763ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.271710  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:kube-controller-manager: (4.329068ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.290912  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (3.542385ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.292622  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:kube-controller-manager
I1009 06:02:53.309915  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:kube-dns: (2.470906ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.330441  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (3.12655ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.331815  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:kube-dns
I1009 06:02:53.350479  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:kube-scheduler: (3.022221ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.354537  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.354583  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.354643  108297 httplog.go:90] GET /healthz: (1.852142ms) 0 [Go-http-client/1.1 127.0.0.1:39704]
I1009 06:02:53.365575  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.365619  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.365682  108297 httplog.go:90] GET /healthz: (1.724029ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.370257  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (3.130503ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.370991  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:kube-scheduler
I1009 06:02:53.390055  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:volume-scheduler: (1.782945ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.412136  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (4.564381ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.412552  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:volume-scheduler
I1009 06:02:53.429327  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:node: (1.896957ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.451617  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (4.186498ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.452042  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:node
I1009 06:02:53.455326  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.455361  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.455426  108297 httplog.go:90] GET /healthz: (1.967961ms) 0 [Go-http-client/1.1 127.0.0.1:39704]
I1009 06:02:53.466050  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.466102  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.466164  108297 httplog.go:90] GET /healthz: (2.101384ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.469842  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:attachdetach-controller: (1.889027ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.490645  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (3.272252ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.491299  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:attachdetach-controller
I1009 06:02:53.509474  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:clusterrole-aggregation-controller: (2.184819ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.533383  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (5.875392ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.534514  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:clusterrole-aggregation-controller
I1009 06:02:53.554238  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.554286  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.554332  108297 httplog.go:90] GET /healthz: (1.595374ms) 0 [Go-http-client/1.1 127.0.0.1:39704]
I1009 06:02:53.556031  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:cronjob-controller: (1.973456ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.567086  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.567143  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.567209  108297 httplog.go:90] GET /healthz: (2.994333ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.571510  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (3.431894ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.572336  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:cronjob-controller
I1009 06:02:53.590081  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:daemon-set-controller: (2.689602ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.611620  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (3.936538ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.611997  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:daemon-set-controller
I1009 06:02:53.630274  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:deployment-controller: (2.882081ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.652320  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (4.75901ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.653520  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:deployment-controller
I1009 06:02:53.654631  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.654663  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.654715  108297 httplog.go:90] GET /healthz: (1.759603ms) 0 [Go-http-client/1.1 127.0.0.1:39704]
I1009 06:02:53.665892  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.665936  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.665988  108297 httplog.go:90] GET /healthz: (1.813939ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.669354  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:disruption-controller: (1.837573ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:53.693595  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (5.950899ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.694875  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:disruption-controller
I1009 06:02:53.710654  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:endpoint-controller: (3.267551ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.733923  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.926583ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.735200  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:endpoint-controller
E1009 06:02:53.736470  108297 factory.go:681] Error getting pod permit-pluginff91ade3-86ba-4b97-889a-c15170d4c419/test-pod for retry: Get http://127.0.0.1:39047/api/v1/namespaces/permit-pluginff91ade3-86ba-4b97-889a-c15170d4c419/pods/test-pod: dial tcp 127.0.0.1:39047: connect: connection refused; retrying...
I1009 06:02:53.749148  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:expand-controller: (1.589424ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.761010  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.761051  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.761113  108297 httplog.go:90] GET /healthz: (1.468683ms) 0 [Go-http-client/1.1 127.0.0.1:39704]
I1009 06:02:53.765280  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.765322  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.765383  108297 httplog.go:90] GET /healthz: (1.355311ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.771214  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (3.964827ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.771530  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:expand-controller
I1009 06:02:53.789230  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:generic-garbage-collector: (1.890972ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.810977  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (3.566198ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.811520  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:generic-garbage-collector
I1009 06:02:53.829785  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:horizontal-pod-autoscaler: (2.307503ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.850503  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (2.967898ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.851157  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:horizontal-pod-autoscaler
I1009 06:02:53.853902  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.853932  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.853979  108297 httplog.go:90] GET /healthz: (1.501631ms) 0 [Go-http-client/1.1 127.0.0.1:39704]
I1009 06:02:53.866004  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.866061  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.866137  108297 httplog.go:90] GET /healthz: (2.149224ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.869109  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:job-controller: (1.89675ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.893220  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (5.701869ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.893651  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:job-controller
I1009 06:02:53.909573  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:namespace-controller: (1.865074ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.931241  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (3.724394ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.931611  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:namespace-controller
I1009 06:02:53.949500  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:node-controller: (2.087412ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.954090  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.954131  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.954174  108297 httplog.go:90] GET /healthz: (1.402917ms) 0 [Go-http-client/1.1 127.0.0.1:39704]
I1009 06:02:53.965580  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:53.965619  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:53.965683  108297 httplog.go:90] GET /healthz: (1.566988ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.972023  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (4.670809ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:53.972352  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:node-controller
I1009 06:02:53.990017  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:persistent-volume-binder: (2.505048ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.011608  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (4.055532ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.011952  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:persistent-volume-binder
I1009 06:02:54.029654  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:pod-garbage-collector: (2.265376ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.055868  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (7.965608ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.056518  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:pod-garbage-collector
I1009 06:02:54.056785  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.056810  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.056855  108297 httplog.go:90] GET /healthz: (4.306552ms) 0 [Go-http-client/1.1 127.0.0.1:39702]
I1009 06:02:54.082507  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:replicaset-controller: (15.018156ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.082782  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.082807  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.082839  108297 httplog.go:90] GET /healthz: (18.265771ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.099132  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (11.859433ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.099489  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:replicaset-controller
I1009 06:02:54.109821  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:replication-controller: (2.274806ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.132028  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (4.692312ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.132369  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:replication-controller
I1009 06:02:54.157173  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.157245  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.157318  108297 httplog.go:90] GET /healthz: (4.579702ms) 0 [Go-http-client/1.1 127.0.0.1:39702]
I1009 06:02:54.166387  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:resourcequota-controller: (19.011591ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.170614  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.170650  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.170700  108297 httplog.go:90] GET /healthz: (4.337782ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.174124  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (6.498641ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.174381  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:resourcequota-controller
I1009 06:02:54.190478  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:route-controller: (2.944111ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.217955  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (7.383293ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.218368  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:route-controller
I1009 06:02:54.231931  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:service-account-controller: (2.681362ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.251812  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (4.435187ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.252171  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:service-account-controller
I1009 06:02:54.253941  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.253978  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.254018  108297 httplog.go:90] GET /healthz: (1.265001ms) 0 [Go-http-client/1.1 127.0.0.1:39704]
I1009 06:02:54.270350  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.270396  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.270470  108297 httplog.go:90] GET /healthz: (4.371671ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.277330  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:service-controller: (6.643125ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.292927  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (3.869924ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.293273  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:service-controller
I1009 06:02:54.309514  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:statefulset-controller: (2.060674ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.333936  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (6.438849ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.338774  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:statefulset-controller
I1009 06:02:54.349165  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:ttl-controller: (1.846619ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.359420  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.359473  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.359538  108297 httplog.go:90] GET /healthz: (6.981579ms) 0 [Go-http-client/1.1 127.0.0.1:39702]
I1009 06:02:54.365448  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.365486  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.365545  108297 httplog.go:90] GET /healthz: (1.461224ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.372178  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (3.574177ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.372494  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:ttl-controller
I1009 06:02:54.389439  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:certificate-controller: (2.204751ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.414677  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (7.233082ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.419299  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:certificate-controller
I1009 06:02:54.430118  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:pvc-protection-controller: (2.469753ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.450486  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (3.0631ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.450917  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:pvc-protection-controller
I1009 06:02:54.454140  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.454180  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.454233  108297 httplog.go:90] GET /healthz: (1.711003ms) 0 [Go-http-client/1.1 127.0.0.1:39702]
I1009 06:02:54.465961  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.466002  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.466058  108297 httplog.go:90] GET /healthz: (2.100699ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.469309  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/clusterrolebindings/system:controller:pv-protection-controller: (2.137612ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.491527  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/clusterrolebindings: (3.927399ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.492095  108297 storage_rbac.go:247] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:pv-protection-controller
I1009 06:02:54.509456  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/extension-apiserver-authentication-reader: (2.038739ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.513324  108297 httplog.go:90] GET /api/v1/namespaces/kube-system: (3.232543ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.530658  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles: (3.327309ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.531025  108297 storage_rbac.go:278] created role.rbac.authorization.k8s.io/extension-apiserver-authentication-reader in kube-system
I1009 06:02:54.550062  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:controller:bootstrap-signer: (2.620504ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.553389  108297 httplog.go:90] GET /api/v1/namespaces/kube-system: (2.575394ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.554338  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.554372  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.554425  108297 httplog.go:90] GET /healthz: (1.449308ms) 0 [Go-http-client/1.1 127.0.0.1:39704]
I1009 06:02:54.565602  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.565647  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.565702  108297 httplog.go:90] GET /healthz: (1.683385ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.570231  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles: (3.008028ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.570538  108297 storage_rbac.go:278] created role.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-system
I1009 06:02:54.590356  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:controller:cloud-provider: (2.786938ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.593503  108297 httplog.go:90] GET /api/v1/namespaces/kube-system: (2.411293ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.611291  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles: (3.762839ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.611900  108297 storage_rbac.go:278] created role.rbac.authorization.k8s.io/system:controller:cloud-provider in kube-system
I1009 06:02:54.629718  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system:controller:token-cleaner: (2.3163ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.633064  108297 httplog.go:90] GET /api/v1/namespaces/kube-system: (2.379805ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.651777  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles: (4.341685ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.652380  108297 storage_rbac.go:278] created role.rbac.authorization.k8s.io/system:controller:token-cleaner in kube-system
I1009 06:02:54.654344  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.654385  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.654477  108297 httplog.go:90] GET /healthz: (1.985758ms) 0 [Go-http-client/1.1 127.0.0.1:39704]
I1009 06:02:54.666268  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.666706  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.668464  108297 httplog.go:90] GET /healthz: (4.264314ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.669714  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system::leader-locking-kube-controller-manager: (2.537611ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.672373  108297 httplog.go:90] GET /api/v1/namespaces/kube-system: (2.115924ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.691280  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles: (3.879283ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.692328  108297 storage_rbac.go:278] created role.rbac.authorization.k8s.io/system::leader-locking-kube-controller-manager in kube-system
I1009 06:02:54.710368  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles/system::leader-locking-kube-scheduler: (2.861113ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.714157  108297 httplog.go:90] GET /api/v1/namespaces/kube-system: (2.591679ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.730518  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/roles: (3.064284ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.730894  108297 storage_rbac.go:278] created role.rbac.authorization.k8s.io/system::leader-locking-kube-scheduler in kube-system
I1009 06:02:54.749916  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-public/roles/system:controller:bootstrap-signer: (2.609541ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.752093  108297 httplog.go:90] GET /api/v1/namespaces/kube-public: (1.589331ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.753547  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.753770  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.754079  108297 httplog.go:90] GET /healthz: (1.48853ms) 0 [Go-http-client/1.1 127.0.0.1:39702]
I1009 06:02:54.766331  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.766700  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.767202  108297 httplog.go:90] GET /healthz: (2.961499ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.770166  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-public/roles: (2.886294ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.770499  108297 storage_rbac.go:278] created role.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-public
I1009 06:02:54.790173  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system::extension-apiserver-authentication-reader: (2.65674ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.794014  108297 httplog.go:90] GET /api/v1/namespaces/kube-system: (2.402893ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.812344  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings: (4.478658ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.812696  108297 storage_rbac.go:308] created rolebinding.rbac.authorization.k8s.io/system::extension-apiserver-authentication-reader in kube-system
I1009 06:02:54.832319  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system::leader-locking-kube-controller-manager: (4.519712ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.838559  108297 httplog.go:90] GET /api/v1/namespaces/kube-system: (4.248405ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.853830  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings: (6.395545ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.854435  108297 storage_rbac.go:308] created rolebinding.rbac.authorization.k8s.io/system::leader-locking-kube-controller-manager in kube-system
I1009 06:02:54.857961  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.857999  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.858049  108297 httplog.go:90] GET /healthz: (2.726012ms) 0 [Go-http-client/1.1 127.0.0.1:39704]
I1009 06:02:54.866222  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.866284  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.866388  108297 httplog.go:90] GET /healthz: (2.436424ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.869345  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system::leader-locking-kube-scheduler: (2.023154ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.873476  108297 httplog.go:90] GET /api/v1/namespaces/kube-system: (3.497793ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.895176  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings: (6.817302ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.895597  108297 storage_rbac.go:308] created rolebinding.rbac.authorization.k8s.io/system::leader-locking-kube-scheduler in kube-system
I1009 06:02:54.909081  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:controller:bootstrap-signer: (1.799611ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.927294  108297 httplog.go:90] GET /api/v1/namespaces/kube-system: (17.662078ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.934777  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings: (6.18997ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.935128  108297 storage_rbac.go:308] created rolebinding.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-system
I1009 06:02:54.950423  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:controller:cloud-provider: (2.125771ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.953523  108297 httplog.go:90] GET /api/v1/namespaces/kube-system: (2.312404ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:54.953542  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.953645  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.953694  108297 httplog.go:90] GET /healthz: (1.205345ms) 0 [Go-http-client/1.1 127.0.0.1:39702]
I1009 06:02:54.965298  108297 healthz.go:177] healthz check poststarthook/rbac/bootstrap-roles failed: not finished
I1009 06:02:54.965334  108297 healthz.go:191] [+]ping ok
[+]log ok
[+]etcd ok
[+]poststarthook/generic-apiserver-start-informers ok
[+]poststarthook/bootstrap-controller ok
[-]poststarthook/rbac/bootstrap-roles failed: reason withheld
[+]poststarthook/scheduling/bootstrap-system-priority-classes ok
[+]poststarthook/ca-registration ok
healthz check failed
I1009 06:02:54.965377  108297 httplog.go:90] GET /healthz: (1.314007ms) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.969467  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings: (2.311362ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.969785  108297 storage_rbac.go:308] created rolebinding.rbac.authorization.k8s.io/system:controller:cloud-provider in kube-system
I1009 06:02:54.989239  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings/system:controller:token-cleaner: (1.889504ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:54.991987  108297 httplog.go:90] GET /api/v1/namespaces/kube-system: (1.891762ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:55.012302  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-system/rolebindings: (5.013936ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:55.012641  108297 storage_rbac.go:308] created rolebinding.rbac.authorization.k8s.io/system:controller:token-cleaner in kube-system
I1009 06:02:55.030416  108297 httplog.go:90] GET /apis/rbac.authorization.k8s.io/v1/namespaces/kube-public/rolebindings/system:controller:bootstrap-signer: (3.046007ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:55.035241  108297 httplog.go:90] GET /api/v1/namespaces/kube-public: (3.734846ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:55.050521  108297 httplog.go:90] POST /apis/rbac.authorization.k8s.io/v1/namespaces/kube-public/rolebindings: (3.135317ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:55.051169  108297 storage_rbac.go:308] created rolebinding.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-public
I1009 06:02:55.055154  108297 httplog.go:90] GET /healthz: (1.133209ms) 200 [Go-http-client/1.1 127.0.0.1:39702]
W1009 06:02:55.056107  108297 mutation_detector.go:50] Mutation detector is enabled, this will result in memory leakage.
W1009 06:02:55.056206  108297 mutation_detector.go:50] Mutation detector is enabled, this will result in memory leakage.
W1009 06:02:55.056226  108297 mutation_detector.go:50] Mutation detector is enabled, this will result in memory leakage.
W1009 06:02:55.056241  108297 mutation_detector.go:50] Mutation detector is enabled, this will result in memory leakage.
W1009 06:02:55.056259  108297 mutation_detector.go:50] Mutation detector is enabled, this will result in memory leakage.
W1009 06:02:55.056275  108297 mutation_detector.go:50] Mutation detector is enabled, this will result in memory leakage.
W1009 06:02:55.056287  108297 mutation_detector.go:50] Mutation detector is enabled, this will result in memory leakage.
W1009 06:02:55.056305  108297 mutation_detector.go:50] Mutation detector is enabled, this will result in memory leakage.
W1009 06:02:55.056331  108297 mutation_detector.go:50] Mutation detector is enabled, this will result in memory leakage.
W1009 06:02:55.056344  108297 mutation_detector.go:50] Mutation detector is enabled, this will result in memory leakage.
W1009 06:02:55.056370  108297 mutation_detector.go:50] Mutation detector is enabled, this will result in memory leakage.
I1009 06:02:55.056440  108297 factory.go:281] Creating scheduler from algorithm provider 'DefaultProvider'
I1009 06:02:55.056455  108297 factory.go:369] Creating scheduler with fit predicates 'map[CheckNodeCondition:{} CheckNodeDiskPressure:{} CheckNodeMemoryPressure:{} CheckNodePIDPressure:{} CheckVolumeBinding:{} GeneralPredicates:{} MatchInterPodAffinity:{} MaxAzureDiskVolumeCount:{} MaxCSIVolumeCountPred:{} MaxEBSVolumeCount:{} MaxGCEPDVolumeCount:{} NoDiskConflict:{} NoVolumeZoneConflict:{} PodToleratesNodeTaints:{}]' and priority functions 'map[BalancedResourceAllocation:{} ImageLocalityPriority:{} InterPodAffinityPriority:{} LeastRequestedPriority:{} NodeAffinityPriority:{} NodePreferAvoidPodsPriority:{} SelectorSpreadPriority:{} TaintTolerationPriority:{}]'
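The factory line above lists the DefaultProvider's fit predicates, including CheckNodePIDPressure, which is what this test exercises. As an illustration only (the real predicate lives in the scheduler's algorithm package, and this sketch is not its actual code), the decision it keys on boils down to whether the node advertises the PIDPressure condition as True:

```go
// Illustrative sketch of the condition a CheckNodePIDPressure-style predicate
// rejects on; names other than the k8s.io/api types are hypothetical.
package sketch

import v1 "k8s.io/api/core/v1"

// nodeUnderPIDPressure reports whether the node's status carries
// PIDPressure=True, in which case the default predicate fails the node.
func nodeUnderPIDPressure(node *v1.Node) bool {
	for _, cond := range node.Status.Conditions {
		if cond.Type == v1.NodePIDPressure && cond.Status == v1.ConditionTrue {
			return true
		}
	}
	return false
}
```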
I1009 06:02:55.056820  108297 shared_informer.go:197] Waiting for caches to sync for scheduler
I1009 06:02:55.057108  108297 reflector.go:150] Starting reflector *v1.Pod (12h0m0s) from k8s.io/kubernetes/test/integration/scheduler/util.go:210
I1009 06:02:55.057123  108297 reflector.go:185] Listing and watching *v1.Pod from k8s.io/kubernetes/test/integration/scheduler/util.go:210
I1009 06:02:55.062947  108297 httplog.go:90] GET /api/v1/pods?fieldSelector=status.phase%21%3DFailed%2Cstatus.phase%21%3DSucceeded&limit=500&resourceVersion=0: (1.139032ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:02:55.065426  108297 get.go:251] Starting watch for /api/v1/pods, rv=30241 labels= fields=status.phase!=Failed,status.phase!=Succeeded timeout=5m4s
I1009 06:02:55.068513  108297 httplog.go:90] GET /healthz: (1.979489ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:55.070571  108297 httplog.go:90] GET /api/v1/namespaces/default: (1.665527ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:55.073744  108297 httplog.go:90] POST /api/v1/namespaces: (2.20489ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:55.075583  108297 httplog.go:90] GET /api/v1/namespaces/default/services/kubernetes: (1.474097ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:55.082219  108297 httplog.go:90] POST /api/v1/namespaces/default/services: (6.151694ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:55.084628  108297 httplog.go:90] GET /api/v1/namespaces/default/endpoints/kubernetes: (1.561096ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:55.090126  108297 httplog.go:90] POST /api/v1/namespaces/default/endpoints: (4.685819ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:55.157039  108297 shared_informer.go:227] caches populated
I1009 06:02:55.157086  108297 shared_informer.go:204] Caches are synced for scheduler 
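"Caches are synced for scheduler" marks the point where the shared informers started below have completed their initial List/Watch and populated their local stores; the scheduler only begins binding pods after this handshake. A minimal sketch of that handshake with client-go is shown here, assuming an already-constructed clientset; the 1s resync period mirrors the reflector resync lines that recur later in the log.

```go
// Sketch: start the shared informers the scheduler consumes and block until
// their caches have synced, assuming `client` is already constructed.
package sketch

import (
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
)

func startAndSyncInformers(client kubernetes.Interface, stopCh <-chan struct{}) bool {
	factory := informers.NewSharedInformerFactory(client, 1*time.Second) // 1s resync, as in the log

	// Touch the informers so the factory knows to start them.
	factory.Core().V1().Pods().Informer()
	factory.Core().V1().Nodes().Informer()
	factory.Storage().V1().StorageClasses().Informer()

	factory.Start(stopCh)

	// WaitForCacheSync maps each informer type to whether it synced.
	for _, ok := range factory.WaitForCacheSync(stopCh) {
		if !ok {
			return false
		}
	}
	return true
}
```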
I1009 06:02:55.157503  108297 reflector.go:150] Starting reflector *v1.PersistentVolumeClaim (1s) from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.157526  108297 reflector.go:185] Listing and watching *v1.PersistentVolumeClaim from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158022  108297 reflector.go:150] Starting reflector *v1.ReplicaSet (1s) from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158046  108297 reflector.go:185] Listing and watching *v1.ReplicaSet from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158077  108297 reflector.go:150] Starting reflector *v1.Node (1s) from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158099  108297 reflector.go:185] Listing and watching *v1.Node from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158120  108297 reflector.go:150] Starting reflector *v1.Service (1s) from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158133  108297 reflector.go:185] Listing and watching *v1.Service from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158395  108297 reflector.go:150] Starting reflector *v1beta1.PodDisruptionBudget (1s) from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158419  108297 reflector.go:185] Listing and watching *v1beta1.PodDisruptionBudget from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158507  108297 reflector.go:150] Starting reflector *v1.StorageClass (1s) from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158520  108297 reflector.go:185] Listing and watching *v1.StorageClass from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158597  108297 reflector.go:150] Starting reflector *v1.PersistentVolume (1s) from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158606  108297 reflector.go:150] Starting reflector *v1.ReplicationController (1s) from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158610  108297 reflector.go:185] Listing and watching *v1.PersistentVolume from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158621  108297 reflector.go:185] Listing and watching *v1.ReplicationController from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158928  108297 reflector.go:150] Starting reflector *v1.StatefulSet (1s) from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.158948  108297 reflector.go:185] Listing and watching *v1.StatefulSet from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.159975  108297 httplog.go:90] GET /apis/policy/v1beta1/poddisruptionbudgets?limit=500&resourceVersion=0: (637.645µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40178]
I1009 06:02:55.159975  108297 httplog.go:90] GET /api/v1/persistentvolumes?limit=500&resourceVersion=0: (624.121µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40182]
I1009 06:02:55.160050  108297 httplog.go:90] GET /api/v1/persistentvolumeclaims?limit=500&resourceVersion=0: (912.531µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:02:55.160315  108297 httplog.go:90] GET /api/v1/nodes?limit=500&resourceVersion=0: (360.721µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40176]
I1009 06:02:55.160325  108297 httplog.go:90] GET /apis/storage.k8s.io/v1/storageclasses?limit=500&resourceVersion=0: (342.43µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40180]
I1009 06:02:55.160345  108297 httplog.go:90] GET /apis/apps/v1/statefulsets?limit=500&resourceVersion=0: (1.01539ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40186]
I1009 06:02:55.160471  108297 httplog.go:90] GET /api/v1/replicationcontrollers?limit=500&resourceVersion=0: (927.396µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40184]
I1009 06:02:55.160917  108297 httplog.go:90] GET /api/v1/services?limit=500&resourceVersion=0: (484.149µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:55.160912  108297 get.go:251] Starting watch for /api/v1/persistentvolumeclaims, rv=30241 labels= fields= timeout=7m41s
I1009 06:02:55.161458  108297 get.go:251] Starting watch for /api/v1/replicationcontrollers, rv=30241 labels= fields= timeout=5m40s
I1009 06:02:55.161483  108297 httplog.go:90] GET /apis/apps/v1/replicasets?limit=500&resourceVersion=0: (454.897µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40170]
I1009 06:02:55.161536  108297 get.go:251] Starting watch for /api/v1/nodes, rv=30241 labels= fields= timeout=6m37s
I1009 06:02:55.161995  108297 get.go:251] Starting watch for /apis/storage.k8s.io/v1/storageclasses, rv=30241 labels= fields= timeout=7m10s
I1009 06:02:55.162063  108297 get.go:251] Starting watch for /api/v1/persistentvolumes, rv=30241 labels= fields= timeout=6m11s
I1009 06:02:55.162292  108297 get.go:251] Starting watch for /api/v1/services, rv=30359 labels= fields= timeout=6m39s
I1009 06:02:55.162571  108297 get.go:251] Starting watch for /apis/apps/v1/statefulsets, rv=30241 labels= fields= timeout=5m38s
I1009 06:02:55.162714  108297 get.go:251] Starting watch for /apis/apps/v1/replicasets, rv=30241 labels= fields= timeout=6m20s
I1009 06:02:55.163535  108297 get.go:251] Starting watch for /apis/policy/v1beta1/poddisruptionbudgets, rv=30241 labels= fields= timeout=9m4s
I1009 06:02:55.164099  108297 reflector.go:150] Starting reflector *v1beta1.CSINode (1s) from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.164120  108297 reflector.go:185] Listing and watching *v1beta1.CSINode from k8s.io/client-go/informers/factory.go:134
I1009 06:02:55.164972  108297 httplog.go:90] GET /apis/storage.k8s.io/v1beta1/csinodes?limit=500&resourceVersion=0: (423.432µs) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40192]
I1009 06:02:55.165636  108297 get.go:251] Starting watch for /apis/storage.k8s.io/v1beta1/csinodes, rv=30241 labels= fields= timeout=7m58s
I1009 06:02:55.257412  108297 shared_informer.go:227] caches populated
I1009 06:02:55.257465  108297 shared_informer.go:227] caches populated
I1009 06:02:55.257475  108297 shared_informer.go:227] caches populated
I1009 06:02:55.257482  108297 shared_informer.go:227] caches populated
I1009 06:02:55.257489  108297 shared_informer.go:227] caches populated
I1009 06:02:55.257496  108297 shared_informer.go:227] caches populated
I1009 06:02:55.257502  108297 shared_informer.go:227] caches populated
I1009 06:02:55.257509  108297 shared_informer.go:227] caches populated
I1009 06:02:55.257515  108297 shared_informer.go:227] caches populated
I1009 06:02:55.257528  108297 shared_informer.go:227] caches populated
I1009 06:02:55.257542  108297 shared_informer.go:227] caches populated
I1009 06:02:55.262319  108297 httplog.go:90] POST /api/v1/nodes: (3.920677ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:55.263108  108297 node_tree.go:93] Added node "testnode" in group "" to NodeTree
I1009 06:02:55.268129  108297 httplog.go:90] PUT /api/v1/nodes/testnode/status: (4.914364ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
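The POST /api/v1/nodes plus PUT /api/v1/nodes/testnode/status pair above is the test registering its fake "testnode" and then pushing a status; for a PID-pressure scenario the relevant part of that status is the PIDPressure node condition. A hedged sketch of that pair with client-go follows; the function name is hypothetical, and the context-taking call signatures are those of recent client-go releases (the 2019-era code used the same methods without a context argument).

```go
// Sketch: create a fake node, then write a status carrying the PIDPressure condition.
package sketch

import (
	"context"

	v1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
)

func createNodeWithPIDPressure(ctx context.Context, client kubernetes.Interface, pressure bool) error {
	node := &v1.Node{ObjectMeta: metav1.ObjectMeta{Name: "testnode"}}
	created, err := client.CoreV1().Nodes().Create(ctx, node, metav1.CreateOptions{})
	if err != nil {
		return err
	}

	status := v1.ConditionFalse
	if pressure {
		status = v1.ConditionTrue
	}
	created.Status.Conditions = append(created.Status.Conditions, v1.NodeCondition{
		Type:   v1.NodePIDPressure,
		Status: status,
	})
	_, err = client.CoreV1().Nodes().UpdateStatus(ctx, created, metav1.UpdateOptions{})
	return err
}
```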
I1009 06:02:55.276164  108297 httplog.go:90] POST /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods: (7.450221ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:55.276747  108297 scheduling_queue.go:832] About to try and schedule pod node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pidpressure-fake-name
I1009 06:02:55.276771  108297 scheduler.go:578] Attempting to schedule pod: node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pidpressure-fake-name
I1009 06:02:55.276960  108297 scheduler_binder.go:257] AssumePodVolumes for pod "node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pidpressure-fake-name", node "testnode"
I1009 06:02:55.276980  108297 scheduler_binder.go:267] AssumePodVolumes for pod "node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pidpressure-fake-name", node "testnode": all PVCs bound and nothing to do
I1009 06:02:55.277055  108297 factory.go:697] Attempting to bind pidpressure-fake-name to testnode
I1009 06:02:55.285441  108297 httplog.go:90] POST /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name/binding: (7.879557ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:55.285892  108297 scheduler.go:710] pod node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pidpressure-fake-name is bound successfully on node "testnode", 1 nodes evaluated, 1 nodes were found feasible. Bound node resource: "Capacity: CPU<0>|Memory<0>|Pods<32>|StorageEphemeral<0>; Allocatable: CPU<0>|Memory<0>|Pods<32>|StorageEphemeral<0>.".
I1009 06:02:55.289533  108297 httplog.go:90] POST /apis/events.k8s.io/v1beta1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/events: (3.180116ms) 201 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
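With the pod bound to "testnode" and the binding event posted, the steady stream of GET .../pods/pidpressure-fake-name lines that follows is the test polling the pod roughly every 100ms while it waits on its scheduling outcome. A sketch of such a wait loop is below; the namespace and pod name come from the log, while the helper name, interval, and timeout are assumptions.

```go
// Sketch: poll the pod until spec.nodeName is set, producing GETs like those below.
package sketch

import (
	"context"
	"time"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
)

func waitForPodScheduled(client kubernetes.Interface, namespace, name string) error {
	return wait.PollImmediate(100*time.Millisecond, 30*time.Second, func() (bool, error) {
		pod, err := client.CoreV1().Pods(namespace).Get(context.TODO(), name, metav1.GetOptions{})
		if err != nil {
			return false, err
		}
		return pod.Spec.NodeName != "", nil
	})
}
```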
I1009 06:02:55.379826  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.612228ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:55.479880  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.72312ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:55.580945  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.675226ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:55.684389  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.246942ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:55.779794  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.655751ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:55.880229  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.009232ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:55.980160  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.948263ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:56.080581  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.168233ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:56.160719  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:56.160871  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:56.161323  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:56.161526  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:56.162546  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:56.165561  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:56.178760  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.79287ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:56.280008  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.732354ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:56.382957  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.720323ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:56.479827  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.674031ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:56.579402  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.226948ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:56.679825  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.648252ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:56.780360  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.069283ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:56.881443  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.111629ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:56.979628  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.31017ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:57.080225  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.912618ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:57.160989  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:57.161458  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:57.161683  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:57.161715  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:57.163421  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:57.165828  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:57.179781  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.519929ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:57.280164  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.978023ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:57.379647  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.533837ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:57.480741  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.331455ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:57.582318  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.69332ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:57.682019  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.522387ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:57.782106  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.673638ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:57.886141  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (8.844054ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:57.980197  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.912862ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:58.080216  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.027114ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:58.169683  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:58.169785  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:58.170460  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:58.171079  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:58.171114  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:58.171278  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:58.181406  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.279426ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:58.292530  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.896494ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:58.382989  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (5.732934ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:58.480638  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.46407ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:58.580571  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.393324ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:58.680282  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.119232ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:58.779688  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.533759ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:58.884861  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.514597ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:58.980305  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.992954ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:59.081534  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.804682ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:59.171355  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:59.171603  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:59.171625  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:59.171642  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:59.171691  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:59.172821  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:02:59.205374  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (15.004973ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:59.294626  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (7.57328ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:59.380690  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.415004ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:59.480680  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.446778ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:59.580261  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.026143ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:59.687274  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.134065ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:59.783315  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.433345ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:59.887418  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (7.313756ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:02:59.980890  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.594946ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:00.079533  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.204509ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:00.174510  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:00.174769  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:00.174794  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:00.174814  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:00.174858  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:00.174889  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:00.180922  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.806433ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:00.288302  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (10.884653ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:00.380525  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.320398ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:00.479921  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.745806ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:00.581358  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.98358ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:00.680579  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.080023ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:00.780065  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.617772ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:00.880650  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.41635ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:00.979873  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.71547ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:01.080000  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.790159ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:01.178691  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:01.178866  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:01.178889  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:01.178907  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:01.178945  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:01.178961  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:01.183679  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.514587ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:01.280395  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.44964ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:01.379591  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.422084ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:01.480139  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.902557ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:01.579569  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.414732ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:01.678515  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.50514ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:01.779240  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.995753ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:01.878661  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.696574ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:01.981080  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.991152ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:02.079149  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.05277ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:02.178907  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.9474ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:02.179403  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:02.179480  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:02.179535  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:02.179557  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:02.179569  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:02.179593  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:02.279808  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.755834ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:02.378675  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.689498ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:02.479067  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.09494ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:02.578597  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.552939ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:02.678423  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.521139ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:02.780172  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.846872ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:02.882393  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.955337ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:02.982212  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.515692ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:03.079377  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.394583ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:03.180082  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:03.180117  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:03.180090  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:03.180167  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:03.180216  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:03.180271  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:03.180573  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.344621ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:03.280209  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.877409ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:03.381599  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.392374ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
E1009 06:03:03.386622  108297 event_broadcaster.go:247] Unable to write event: 'Post http://127.0.0.1:39047/apis/events.k8s.io/v1beta1/namespaces/permit-pluginff91ade3-86ba-4b97-889a-c15170d4c419/events: dial tcp 127.0.0.1:39047: connect: connection refused' (may retry after sleeping)
I1009 06:03:03.479848  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.618417ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:03.582183  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.547558ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:03.678624  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.623732ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:03.779282  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.144128ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:03.879967  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.2975ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:03.981262  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.073611ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:04.086147  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (8.731748ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:04.179339  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.30988ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:04.180232  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:04.180283  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:04.180310  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:04.180318  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:04.180444  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:04.180507  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:04.280719  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.312263ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:04.379479  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.466823ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:04.479713  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.529607ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:04.583401  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.02266ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:04.680030  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.905254ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:04.780470  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.355831ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:04.881534  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.156641ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:04.980666  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.402685ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:05.072323  108297 httplog.go:90] GET /api/v1/namespaces/default: (3.032756ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:05.076214  108297 httplog.go:90] GET /api/v1/namespaces/default/services/kubernetes: (2.799862ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:05.079127  108297 httplog.go:90] GET /api/v1/namespaces/default/endpoints/kubernetes: (2.32409ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:05.079131  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.841533ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:05.180962  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.315161ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:05.181445  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:05.181477  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:05.181507  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:05.182209  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:05.182329  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:05.182380  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:05.281455  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.223582ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:05.381537  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.432847ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:05.479827  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.599493ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:05.582777  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.339261ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:05.680632  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.33534ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:05.780921  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.525833ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:05.880898  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.361914ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:05.980791  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.476144ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:06.080467  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.16545ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:06.180702  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.747224ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:06.181573  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:06.181641  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:06.181673  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:06.182383  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:06.182589  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:06.182609  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:06.283019  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (5.771605ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:06.381755  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.54781ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:06.480194  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.907265ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:06.579548  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.09132ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:06.681352  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.057251ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:06.779679  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.533604ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:06.880062  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.730465ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:06.980463  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.252736ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:07.080360  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.192583ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:07.179547  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.397913ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:07.181806  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:07.181819  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:07.181853  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:07.182589  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:07.182828  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:07.182915  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:07.280649  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.403153ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:07.380482  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.147649ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:07.480367  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.102571ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:07.578887  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.878827ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:07.679424  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.307026ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:07.780598  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.376475ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:07.880346  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.290815ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:07.978971  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.926217ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:08.079437  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.2874ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:08.181445  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.617434ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:08.182189  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:08.182266  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:08.182297  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:08.182966  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:08.183097  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:08.183162  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:08.281574  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.141456ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:08.379027  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.98547ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:08.486089  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (8.711852ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:08.579975  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.823023ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:08.680585  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.491473ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:08.779794  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.731122ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:08.879881  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.640375ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:08.981768  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.487463ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:09.083607  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (6.414607ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:09.181029  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.929511ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:09.182312  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:09.182416  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:09.182473  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:09.183135  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:09.183295  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:09.183321  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:09.280611  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.461013ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:09.380284  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.086823ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:09.479502  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.364579ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:09.581053  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.852214ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:09.679384  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.307511ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:09.779776  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.57949ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:09.879915  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.636402ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:09.981302  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.09621ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:10.080187  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.942357ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:10.181652  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.134722ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:10.182660  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:10.182703  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:10.182749  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:10.183490  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:10.183559  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:10.183655  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:10.281753  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.543224ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:10.379318  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.282443ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:10.481690  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.31571ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:10.581090  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.90488ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:10.682226  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (5.061434ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:10.782337  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (5.130007ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:10.879893  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.784659ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:10.980674  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.537161ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:11.082480  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.779097ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:11.180545  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.332476ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:11.182996  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:11.183064  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:11.183118  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:11.183704  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:11.183835  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:11.183878  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:11.279459  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.228137ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:11.379908  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.706033ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:11.479986  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.665545ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:11.580643  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.437041ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:11.679767  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.676945ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:11.781185  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.538618ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:11.879435  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.357161ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:11.979833  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.533444ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:12.082419  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.193574ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:12.180260  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.003699ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:12.183230  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:12.183281  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:12.183299  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:12.184018  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:12.190023  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:12.190015  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:12.282315  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.345316ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:12.382670  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (5.446139ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:12.480041  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.872821ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:12.919511  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (28.864842ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:12.980617  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.932848ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:13.087967  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (9.149173ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:13.182503  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (5.077611ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:13.183681  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:13.183652  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:13.183660  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:13.184233  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:13.190253  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:13.190253  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:13.280830  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.015361ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:13.394486  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (17.395862ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:13.480194  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.89476ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
E1009 06:03:13.505160  108297 event_broadcaster.go:247] Unable to write event: 'Post http://127.0.0.1:39047/apis/events.k8s.io/v1beta1/namespaces/permit-pluginff91ade3-86ba-4b97-889a-c15170d4c419/events: dial tcp 127.0.0.1:39047: connect: connection refused' (may retry after sleeping)
I1009 06:03:13.580926  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.163943ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:13.679165  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.074109ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:13.780506  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.198325ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:13.885908  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (8.56434ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:13.982086  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.924952ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:14.079789  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.469255ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:14.183954  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:14.184046  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:14.185054  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (5.851694ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:14.185637  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:14.185670  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:14.190693  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:14.190693  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:14.279332  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.299028ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:14.379481  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.097497ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:14.480716  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.399997ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:14.579414  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.321478ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:14.685220  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (6.802119ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:14.779656  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.459507ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:14.885097  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (6.821598ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:14.980515  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.368582ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:15.071923  108297 httplog.go:90] GET /api/v1/namespaces/default: (2.264762ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:15.075090  108297 httplog.go:90] GET /api/v1/namespaces/default/services/kubernetes: (2.633108ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:15.077854  108297 httplog.go:90] GET /api/v1/namespaces/default/endpoints/kubernetes: (2.060041ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:15.079445  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.483435ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:15.180330  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.192126ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:15.184244  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:15.184300  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:15.185850  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:15.186119  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:15.190913  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:15.191184  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:15.279111  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.050754ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:15.379607  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.5443ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:15.480079  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.448785ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:15.579400  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.306381ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:15.679587  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.479256ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:15.780407  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.137622ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:15.880876  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.353397ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:15.979342  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.285938ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:16.079341  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.297581ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:16.179637  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.518552ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:16.184470  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:16.184551  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:16.186068  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:16.186230  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:16.191118  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:16.191537  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:16.280633  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.790727ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:16.379328  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.199163ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:16.480088  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.96063ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:16.580499  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.85215ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:16.680137  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.940029ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:16.779802  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.580829ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:16.879946  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.878263ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:16.981817  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.364141ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:17.079829  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.707653ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:17.182845  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.947483ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:17.184602  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:17.184881  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:17.186195  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:17.186365  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:17.191348  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:17.191833  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:17.279680  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.190191ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:17.379325  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.202281ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:17.479292  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.020778ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:17.580366  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.657119ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:17.680036  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.949722ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:17.780212  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.037835ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:17.881566  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.469604ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:17.980221  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.021922ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:18.092103  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.017381ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:18.179863  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.054608ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:18.184787  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:18.185056  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:18.186483  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:18.186570  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:18.191565  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:18.192029  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:18.280310  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.002155ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:18.381422  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.044739ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:18.479879  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.657442ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:18.581289  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.084872ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:18.679420  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.268583ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:18.779642  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.506127ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:18.883999  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.456304ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:18.979393  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.184466ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:19.079579  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.448407ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:19.180742  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.475156ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:19.185027  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:19.185339  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:19.187270  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:19.187335  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:19.191956  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:19.192253  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:19.280267  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.703439ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
E1009 06:03:19.337475  108297 factory.go:681] Error getting pod permit-pluginff91ade3-86ba-4b97-889a-c15170d4c419/test-pod for retry: Get http://127.0.0.1:39047/api/v1/namespaces/permit-pluginff91ade3-86ba-4b97-889a-c15170d4c419/pods/test-pod: dial tcp 127.0.0.1:39047: connect: connection refused; retrying...
I1009 06:03:19.380005  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.742958ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:19.480571  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.444746ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:19.582164  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (4.916687ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:19.682229  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.885436ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:19.788041  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (10.414231ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:19.879992  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.873209ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:19.982804  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (5.569198ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:20.080708  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.525727ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:20.180432  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.263451ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:20.185440  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:20.185511  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:20.187640  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:20.187707  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:20.192163  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:20.192414  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:20.283491  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (5.900435ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:20.380690  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.526937ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:20.485213  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.804195ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:20.590856  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.306121ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:20.680977  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.795942ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:20.780563  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.286475ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:20.880582  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.363014ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:20.981133  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.590081ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:21.080209  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.795682ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:21.180248  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.123032ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:21.185668  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:21.185751  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:21.187825  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:21.187892  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:21.192371  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:21.192611  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:21.284506  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (5.528459ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:21.379695  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.561346ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:21.487476  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (10.372456ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:21.586865  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (9.554507ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:21.679837  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.555927ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:21.780088  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.340214ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:21.881067  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.308524ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:21.979413  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.374277ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:22.080895  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.581798ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:22.182571  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.273078ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:22.186227  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:22.186292  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:22.188593  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:22.188797  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:22.192577  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:22.192969  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:22.279638  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.561039ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:22.380071  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.902929ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:22.481221  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.081996ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:22.579481  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.4362ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:22.678974  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.883705ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:22.779755  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.566454ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:22.880491  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.182014ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:22.979895  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.696568ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:23.080167  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.87229ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:23.179540  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.819218ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:23.186454  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:23.186514  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:23.189024  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:23.189025  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:23.193598  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:23.193840  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:23.280046  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.854726ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:23.379466  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.390595ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:23.480441  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.223934ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:23.578822  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (1.794678ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:23.679832  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.398304ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:23.779299  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.249155ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:23.880626  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.284915ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:23.979930  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.82484ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:24.080244  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.058191ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:24.180480  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.267825ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:24.186700  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:24.186789  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:24.190138  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:24.190321  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:24.194215  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:24.194975  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:24.281131  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.440203ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:24.379922  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.75439ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:24.479523  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.374236ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:24.579698  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.548773ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:24.679846  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.585512ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:24.780311  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.090635ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:24.880279  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.075702ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:24.979919  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (2.507509ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
E1009 06:03:24.997111  108297 event_broadcaster.go:247] Unable to write event: 'Post http://127.0.0.1:39047/apis/events.k8s.io/v1beta1/namespaces/permit-pluginff91ade3-86ba-4b97-889a-c15170d4c419/events: dial tcp 127.0.0.1:39047: connect: connection refused' (may retry after sleeping)
I1009 06:03:25.074391  108297 httplog.go:90] GET /api/v1/namespaces/default: (4.272849ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:25.080131  108297 httplog.go:90] GET /api/v1/namespaces/default/services/kubernetes: (4.588791ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:25.082128  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (5.076639ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:25.083322  108297 httplog.go:90] GET /api/v1/namespaces/default/endpoints/kubernetes: (2.208125ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40172]
I1009 06:03:25.180506  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.186827ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:25.187089  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:25.187271  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:25.190361  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:25.190747  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:25.194436  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:25.195072  108297 reflector.go:268] k8s.io/client-go/informers/factory.go:134: forcing resync
I1009 06:03:25.280936  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.758097ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:25.288383  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (6.761601ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:25.299998  108297 httplog.go:90] DELETE /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (10.770136ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:25.305574  108297 httplog.go:90] GET /api/v1/namespaces/node-pid-pressure6ba4ff6f-d87d-4b7e-abe9-a35a9907b211/pods/pidpressure-fake-name: (3.785507ms) 404 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:25.307054  108297 httplog.go:90] GET /api/v1/replicationcontrollers?allowWatchBookmarks=true&resourceVersion=30241&timeout=5m40s&timeoutSeconds=340&watch=true: (30.145826792s) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40186]
I1009 06:03:25.307161  108297 httplog.go:90] GET /api/v1/persistentvolumeclaims?allowWatchBookmarks=true&resourceVersion=30241&timeout=7m41s&timeoutSeconds=461&watch=true: (30.146498668s) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39704]
I1009 06:03:25.307251  108297 httplog.go:90] GET /api/v1/nodes?allowWatchBookmarks=true&resourceVersion=30241&timeout=6m37s&timeoutSeconds=397&watch=true: (30.145987314s) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40176]
I1009 06:03:25.307265  108297 httplog.go:90] GET /apis/policy/v1beta1/poddisruptionbudgets?allowWatchBookmarks=true&resourceVersion=30241&timeout=9m4s&timeoutSeconds=544&watch=true: (30.145629135s) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40182]
I1009 06:03:25.307054  108297 httplog.go:90] GET /apis/apps/v1/replicasets?allowWatchBookmarks=true&resourceVersion=30241&timeout=6m20s&timeoutSeconds=380&watch=true: (30.144554744s) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40170]
I1009 06:03:25.307362  108297 httplog.go:90] GET /apis/storage.k8s.io/v1beta1/csinodes?allowWatchBookmarks=true&resourceVersion=30241&timeout=7m58s&timeoutSeconds=478&watch=true: (30.141975462s) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40192]
I1009 06:03:25.307438  108297 httplog.go:90] GET /api/v1/services?allowWatchBookmarks=true&resourceVersion=30359&timeout=6m39s&timeoutSeconds=399&watch=true: (30.145400474s) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40188]
I1009 06:03:25.307490  108297 httplog.go:90] GET /api/v1/persistentvolumes?allowWatchBookmarks=true&resourceVersion=30241&timeout=6m11s&timeoutSeconds=371&watch=true: (30.145697029s) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40178]
I1009 06:03:25.307539  108297 httplog.go:90] GET /apis/storage.k8s.io/v1/storageclasses?allowWatchBookmarks=true&resourceVersion=30241&timeout=7m10s&timeoutSeconds=430&watch=true: (30.145750748s) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40190]
I1009 06:03:25.307593  108297 httplog.go:90] GET /api/v1/pods?allowWatchBookmarks=true&fieldSelector=status.phase%21%3DFailed%2Cstatus.phase%21%3DSucceeded&resourceVersion=30241&timeoutSeconds=304&watch=true: (30.24274717s) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:39702]
I1009 06:03:25.307699  108297 httplog.go:90] GET /apis/apps/v1/statefulsets?allowWatchBookmarks=true&resourceVersion=30241&timeout=5m38s&timeoutSeconds=338&watch=true: (30.145417398s) 0 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40180]
I1009 06:03:25.315119  108297 httplog.go:90] DELETE /api/v1/nodes: (7.489094ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:25.315347  108297 controller.go:185] Shutting down kubernetes service endpoint reconciler
I1009 06:03:25.317490  108297 httplog.go:90] GET /api/v1/namespaces/default/endpoints/kubernetes: (1.892945ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
I1009 06:03:25.320512  108297 httplog.go:90] PUT /api/v1/namespaces/default/endpoints/kubernetes: (2.547095ms) 200 [scheduler.test/v0.0.0 (linux/amd64) kubernetes/$Format 127.0.0.1:40564]
--- FAIL: TestNodePIDPressure (34.36s)
    predicates_test.go:924: Test Failed: error, timed out waiting for the condition, while waiting for scheduled

				from junit_d965d8661547eb73cabe6d94d5550ec333e4c0fa_20191009-055342.xml
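The failure above comes from the integration test giving up while polling for the test pod to be bound to a node: the repeated ~100ms GET requests for pods/pidpressure-fake-name earlier in the log are that polling loop, and "timed out waiting for the condition" is the standard error produced by the wait helpers in k8s.io/apimachinery when the condition never becomes true. What follows is only a minimal sketch of that polling pattern, not the actual code at predicates_test.go:924; the client construction, the namespace/pod names, and the helper name are illustrative assumptions.

package main

import (
	"fmt"
	"time"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/wait"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

// podScheduled returns a condition that reports true once the pod has been
// bound to a node. (Helper name is illustrative; the real test has its own
// waiting utilities.)
func podScheduled(cs kubernetes.Interface, namespace, name string) wait.ConditionFunc {
	return func() (bool, error) {
		// client-go of this era; newer releases also take a context.Context first.
		pod, err := cs.CoreV1().Pods(namespace).Get(name, metav1.GetOptions{})
		if err != nil {
			return false, err
		}
		return pod.Spec.NodeName != "", nil
	}
}

func main() {
	// Assumption: an apiserver reachable via a kubeconfig; the integration test
	// instead talks to its own in-process apiserver.
	config, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(config)

	// Poll every 100ms (matching the request cadence in the log) up to a deadline;
	// on timeout, wait.ErrWaitTimeout reads "timed out waiting for the condition".
	if err := wait.PollImmediate(100*time.Millisecond, 30*time.Second,
		podScheduled(cs, "node-pid-pressure-example", "pidpressure-fake-name")); err != nil {
		fmt.Printf("Test Failed: error, %v, while waiting for scheduled\n", err)
	}
}

In this run the pod never reported a NodeName within the allotted time, so the poll returned the timeout error and the test tore down its fixtures (the DELETE calls for the pod and nodes above) and failed.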





Error lines from build-log.txt

... skipping 855 lines ...
W1009 05:47:02.673] I1009 05:47:02.673271   52917 controllermanager.go:534] Started "namespace"
W1009 05:47:02.674] I1009 05:47:02.673366   52917 namespace_controller.go:200] Starting namespace controller
W1009 05:47:02.675] I1009 05:47:02.673823   52917 shared_informer.go:197] Waiting for caches to sync for namespace
W1009 05:47:02.675] I1009 05:47:02.674429   52917 controllermanager.go:534] Started "serviceaccount"
W1009 05:47:02.675] I1009 05:47:02.674450   52917 serviceaccounts_controller.go:116] Starting service account controller
W1009 05:47:02.676] I1009 05:47:02.674648   52917 shared_informer.go:197] Waiting for caches to sync for service account
W1009 05:47:02.676] E1009 05:47:02.675602   52917 core.go:79] Failed to start service controller: WARNING: no cloud provider provided, services of type LoadBalancer will fail
W1009 05:47:02.676] W1009 05:47:02.675828   52917 controllermanager.go:526] Skipping "service"
W1009 05:47:02.677] I1009 05:47:02.676383   52917 node_lifecycle_controller.go:77] Sending events to api server
W1009 05:47:02.677] E1009 05:47:02.676639   52917 core.go:202] failed to start cloud node lifecycle controller: no cloud provider provided
W1009 05:47:02.677] W1009 05:47:02.676920   52917 controllermanager.go:526] Skipping "cloud-node-lifecycle"
W1009 05:47:02.678] W1009 05:47:02.677074   52917 controllermanager.go:526] Skipping "root-ca-cert-publisher"
W1009 05:47:02.679] I1009 05:47:02.678965   52917 controllermanager.go:534] Started "replicationcontroller"
W1009 05:47:02.679] I1009 05:47:02.679069   52917 replica_set.go:182] Starting replicationcontroller controller
W1009 05:47:02.680] I1009 05:47:02.679343   52917 shared_informer.go:197] Waiting for caches to sync for ReplicationController
W1009 05:47:02.680] I1009 05:47:02.679826   52917 controllermanager.go:534] Started "podgc"
... skipping 46 lines ...
W1009 05:47:02.906] I1009 05:47:02.891607   52917 disruption.go:333] Starting disruption controller
W1009 05:47:02.906] I1009 05:47:02.891633   52917 shared_informer.go:197] Waiting for caches to sync for disruption
W1009 05:47:02.907] I1009 05:47:02.891925   52917 controllermanager.go:534] Started "csrcleaner"
W1009 05:47:02.907] I1009 05:47:02.892706   52917 cleaner.go:81] Starting CSR cleaner controller
W1009 05:47:02.926] The Service "kubernetes" is invalid: spec.clusterIP: Invalid value: "10.0.0.1": provided IP is already allocated
W1009 05:47:02.945] I1009 05:47:02.942215   52917 shared_informer.go:204] Caches are synced for ClusterRoleAggregator 
W1009 05:47:02.958] W1009 05:47:02.957557   52917 actual_state_of_world.go:506] Failed to update statusUpdateNeeded field in actual state of world: Failed to set statusUpdateNeeded to needed true, because nodeName="127.0.0.1" does not exist
W1009 05:47:02.968] I1009 05:47:02.967195   52917 shared_informer.go:204] Caches are synced for TTL 
W1009 05:47:02.974] I1009 05:47:02.974040   52917 shared_informer.go:204] Caches are synced for namespace 
W1009 05:47:02.975] I1009 05:47:02.974839   52917 shared_informer.go:204] Caches are synced for service account 
W1009 05:47:02.979] I1009 05:47:02.979294   49360 controller.go:606] quota admission added evaluator for: serviceaccounts
W1009 05:47:02.980] I1009 05:47:02.979623   52917 shared_informer.go:204] Caches are synced for ReplicationController 
W1009 05:47:02.981] I1009 05:47:02.980595   52917 shared_informer.go:204] Caches are synced for GC 
... skipping 102 lines ...
I1009 05:47:07.773] +++ working dir: /go/src/k8s.io/kubernetes
I1009 05:47:07.776] +++ command: run_RESTMapper_evaluation_tests
I1009 05:47:07.791] +++ [1009 05:47:07] Creating namespace namespace-1570600027-3425
I1009 05:47:07.888] namespace/namespace-1570600027-3425 created
I1009 05:47:07.995] Context "test" modified.
I1009 05:47:08.009] +++ [1009 05:47:08] Testing RESTMapper
I1009 05:47:08.161] +++ [1009 05:47:08] "kubectl get unknownresourcetype" returns error as expected: error: the server doesn't have a resource type "unknownresourcetype"
I1009 05:47:08.187] +++ exit code: 0
I1009 05:47:08.366] NAME                              SHORTNAMES   APIGROUP                       NAMESPACED   KIND
I1009 05:47:08.367] bindings                                                                      true         Binding
I1009 05:47:08.367] componentstatuses                 cs                                          false        ComponentStatus
I1009 05:47:08.367] configmaps                        cm                                          true         ConfigMap
I1009 05:47:08.368] endpoints                         ep                                          true         Endpoints
... skipping 317 lines ...
I1009 05:47:27.129] (Bcore.sh:79: Successful get pods/valid-pod {{.metadata.name}}: valid-pod
I1009 05:47:27.259] (Bcore.sh:81: Successful get pods {.items[*].metadata.name}: valid-pod
I1009 05:47:27.394] (Bcore.sh:82: Successful get pod valid-pod {.metadata.name}: valid-pod
I1009 05:47:27.526] (Bcore.sh:83: Successful get pod/valid-pod {.metadata.name}: valid-pod
I1009 05:47:27.657] (Bcore.sh:84: Successful get pods/valid-pod {.metadata.name}: valid-pod
I1009 05:47:27.799] (B
I1009 05:47:27.807] core.sh:86: FAIL!
I1009 05:47:27.808] Describe pods valid-pod
I1009 05:47:27.808]   Expected Match: Name:
I1009 05:47:27.808]   Not found in:
I1009 05:47:27.808] Name:         valid-pod
I1009 05:47:27.808] Namespace:    namespace-1570600045-26344
I1009 05:47:27.808] Priority:     0
... skipping 108 lines ...
I1009 05:47:28.277] QoS Class:        Guaranteed
I1009 05:47:28.277] Node-Selectors:   <none>
I1009 05:47:28.277] Tolerations:      <none>
I1009 05:47:28.277] Events:           <none>
I1009 05:47:28.278] (B
I1009 05:47:28.423] 
I1009 05:47:28.424] FAIL!
I1009 05:47:28.424] Describe pods
I1009 05:47:28.424]   Expected Match: Name:
I1009 05:47:28.424]   Not found in:
I1009 05:47:28.424] Name:         valid-pod
I1009 05:47:28.424] Namespace:    namespace-1570600045-26344
I1009 05:47:28.424] Priority:     0
... skipping 158 lines ...
I1009 05:47:34.044] core.sh:186: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1009 05:47:34.275] (Bcore.sh:190: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1009 05:47:34.403] (Bcore.sh:194: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1009 05:47:34.658] (Bcore.sh:198: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1009 05:47:34.787] (Bcore.sh:202: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1009 05:47:34.904] (Bpod "valid-pod" force deleted
W1009 05:47:35.005] error: resource(s) were provided, but no name, label selector, or --all flag specified
W1009 05:47:35.006] error: setting 'all' parameter but found a non empty selector. 
W1009 05:47:35.006] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
I1009 05:47:35.107] core.sh:206: Successful get pods -l'name in (valid-pod)' {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:47:35.181] (Bcore.sh:211: Successful get namespaces {{range.items}}{{ if eq .metadata.name \"test-kubectl-describe-pod\" }}found{{end}}{{end}}:: :
I1009 05:47:35.282] (Bnamespace/test-kubectl-describe-pod created
I1009 05:47:35.418] core.sh:215: Successful get namespaces/test-kubectl-describe-pod {{.metadata.name}}: test-kubectl-describe-pod
I1009 05:47:35.544] (Bcore.sh:219: Successful get secrets --namespace=test-kubectl-describe-pod {{range.items}}{{.metadata.name}}:{{end}}: 
... skipping 11 lines ...
I1009 05:47:36.866] (Bpoddisruptionbudget.policy/test-pdb-3 created
I1009 05:47:37.022] core.sh:251: Successful get pdb/test-pdb-3 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 2
I1009 05:47:37.125] (Bpoddisruptionbudget.policy/test-pdb-4 created
I1009 05:47:37.269] core.sh:255: Successful get pdb/test-pdb-4 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 50%
I1009 05:47:37.503] (Bcore.sh:261: Successful get pods --namespace=test-kubectl-describe-pod {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:47:37.776] (Bpod/env-test-pod created
W1009 05:47:37.877] error: min-available and max-unavailable cannot be both specified
I1009 05:47:37.978] 
I1009 05:47:37.979] core.sh:264: FAIL!
I1009 05:47:37.979] Describe pods --namespace=test-kubectl-describe-pod env-test-pod
I1009 05:47:37.979]   Expected Match: TEST_CMD_1
I1009 05:47:37.979]   Not found in:
I1009 05:47:37.979] Name:         env-test-pod
I1009 05:47:37.980] Namespace:    test-kubectl-describe-pod
I1009 05:47:37.980] Priority:     0
... skipping 23 lines ...
I1009 05:47:37.984] Tolerations:       <none>
I1009 05:47:37.984] Events:            <none>
I1009 05:47:37.985] (B
I1009 05:47:37.985] 264 /go/src/k8s.io/kubernetes/test/cmd/../../test/cmd/core.sh
I1009 05:47:37.985] (B
I1009 05:47:38.117] 
I1009 05:47:38.118] FAIL!
I1009 05:47:38.118] Describe pods --namespace=test-kubectl-describe-pod
I1009 05:47:38.118]   Expected Match: TEST_CMD_1
I1009 05:47:38.118]   Not found in:
I1009 05:47:38.118] Name:         env-test-pod
I1009 05:47:38.118] Namespace:    test-kubectl-describe-pod
I1009 05:47:38.118] Priority:     0
... skipping 150 lines ...
I1009 05:47:55.514] (Bpod/valid-pod patched
I1009 05:47:55.660] core.sh:470: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: changed-with-yaml:
I1009 05:47:55.769] (Bpod/valid-pod patched
I1009 05:47:55.914] core.sh:475: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:3.1:
I1009 05:47:56.142] (Bpod/valid-pod patched
I1009 05:47:56.304] core.sh:491: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: nginx:
I1009 05:47:56.565] (B+++ [1009 05:47:56] "kubectl patch with resourceVersion 510" returns error as expected: Error from server (Conflict): Operation cannot be fulfilled on pods "valid-pod": the object has been modified; please apply your changes to the latest version and try again
I1009 05:47:56.947] pod "valid-pod" deleted
I1009 05:47:56.961] pod/valid-pod replaced
I1009 05:47:57.118] core.sh:515: Successful get pod valid-pod {{(index .spec.containers 0).name}}: replaced-k8s-serve-hostname
I1009 05:47:57.374] (BSuccessful
I1009 05:47:57.375] message:error: --grace-period must have --force specified
I1009 05:47:57.375] has:\-\-grace-period must have \-\-force specified
I1009 05:47:57.651] Successful
I1009 05:47:57.652] message:error: --timeout must have --force specified
I1009 05:47:57.652] has:\-\-timeout must have \-\-force specified
I1009 05:47:57.897] node/node-v1-test created
W1009 05:47:57.999] W1009 05:47:57.898099   52917 actual_state_of_world.go:506] Failed to update statusUpdateNeeded field in actual state of world: Failed to set statusUpdateNeeded to needed true, because nodeName="node-v1-test" does not exist
W1009 05:47:58.037] I1009 05:47:58.036318   52917 event.go:262] Event(v1.ObjectReference{Kind:"Node", Namespace:"", Name:"node-v1-test", UID:"521f36d3-f16a-44bc-aa1b-0c9908b15617", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'RegisteredNode' Node node-v1-test event: Registered Node node-v1-test in Controller
I1009 05:47:58.183] node/node-v1-test replaced
I1009 05:47:58.359] core.sh:552: Successful get node node-v1-test {{.metadata.annotations.a}}: b
I1009 05:47:58.470] (Bnode "node-v1-test" deleted
I1009 05:47:58.626] core.sh:559: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: nginx:
I1009 05:47:59.068] (Bcore.sh:562: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: k8s.gcr.io/serve_hostname:
... skipping 26 lines ...
I1009 05:48:01.198]     name: kubernetes-pause
I1009 05:48:01.198] has:localonlyvalue
I1009 05:48:01.256] core.sh:585: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
I1009 05:48:01.510] (Bcore.sh:589: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
I1009 05:48:01.640] (Bcore.sh:593: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
I1009 05:48:01.761] (Bpod/valid-pod labeled
W1009 05:48:01.862] error: 'name' already has a value (valid-pod), and --overwrite is false
I1009 05:48:01.963] core.sh:597: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod-super-sayan
I1009 05:48:02.033] (Bcore.sh:601: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1009 05:48:02.152] (Bpod "valid-pod" force deleted
W1009 05:48:02.253] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
I1009 05:48:02.354] core.sh:605: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:48:02.355] (B+++ [1009 05:48:02] Creating namespace namespace-1570600082-29442
... skipping 83 lines ...
I1009 05:48:12.927] +++ Running case: test-cmd.run_kubectl_create_error_tests 
I1009 05:48:12.932] +++ working dir: /go/src/k8s.io/kubernetes
I1009 05:48:12.936] +++ command: run_kubectl_create_error_tests
I1009 05:48:12.951] +++ [1009 05:48:12] Creating namespace namespace-1570600092-2273
I1009 05:48:13.059] namespace/namespace-1570600092-2273 created
I1009 05:48:13.165] Context "test" modified.
I1009 05:48:13.180] +++ [1009 05:48:13] Testing kubectl create with error
W1009 05:48:13.281] Error: must specify one of -f and -k
W1009 05:48:13.282] 
W1009 05:48:13.282] Create a resource from a file or from stdin.
W1009 05:48:13.282] 
W1009 05:48:13.282]  JSON and YAML formats are accepted.
W1009 05:48:13.282] 
W1009 05:48:13.282] Examples:
... skipping 41 lines ...
W1009 05:48:13.288] 
W1009 05:48:13.288] Usage:
W1009 05:48:13.288]   kubectl create -f FILENAME [options]
W1009 05:48:13.288] 
W1009 05:48:13.288] Use "kubectl <command> --help" for more information about a given command.
W1009 05:48:13.288] Use "kubectl options" for a list of global command-line options (applies to all commands).
I1009 05:48:13.559] +++ [1009 05:48:13] "kubectl create with empty string list returns error as expected: error: error validating "hack/testdata/invalid-rc-with-empty-args.yaml": error validating data: ValidationError(ReplicationController.spec.template.spec.containers[0].args): unknown object type "nil" in ReplicationController.spec.template.spec.containers[0].args[0]; if you choose to ignore these errors, turn validation off with --validate=false
W1009 05:48:13.659] kubectl convert is DEPRECATED and will be removed in a future version.
W1009 05:48:13.660] In order to convert, kubectl apply the object to the cluster, then kubectl get at the desired version.
I1009 05:48:13.816] +++ exit code: 0
I1009 05:48:13.878] Recording: run_kubectl_apply_tests
I1009 05:48:13.878] Running command: run_kubectl_apply_tests
I1009 05:48:13.917] 
... skipping 17 lines ...
I1009 05:48:16.486] (Bpod "test-pod" deleted
I1009 05:48:16.820] customresourcedefinition.apiextensions.k8s.io/resources.mygroup.example.com created
W1009 05:48:17.364] I1009 05:48:17.363365   49360 client.go:361] parsed scheme: "endpoint"
W1009 05:48:17.365] I1009 05:48:17.363476   49360 endpoint.go:66] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379 0  <nil>}]
W1009 05:48:17.376] I1009 05:48:17.375376   49360 controller.go:606] quota admission added evaluator for: resources.mygroup.example.com
I1009 05:48:17.477] kind.mygroup.example.com/myobj serverside-applied (server dry run)
W1009 05:48:17.581] Error from server (NotFound): resources.mygroup.example.com "myobj" not found
I1009 05:48:17.722] customresourcedefinition.apiextensions.k8s.io "resources.mygroup.example.com" deleted
I1009 05:48:17.768] +++ exit code: 0
I1009 05:48:17.828] Recording: run_kubectl_run_tests
I1009 05:48:17.829] Running command: run_kubectl_run_tests
I1009 05:48:17.873] 
I1009 05:48:17.879] +++ Running case: test-cmd.run_kubectl_run_tests 
... skipping 7 lines ...
I1009 05:48:18.416] (Bjob.batch/pi created
W1009 05:48:18.517] kubectl run --generator=job/v1 is DEPRECATED and will be removed in a future version. Use kubectl run --generator=run-pod/v1 or kubectl create instead.
W1009 05:48:18.518] I1009 05:48:18.403663   49360 controller.go:606] quota admission added evaluator for: jobs.batch
W1009 05:48:18.518] I1009 05:48:18.421343   52917 event.go:262] Event(v1.ObjectReference{Kind:"Job", Namespace:"namespace-1570600097-360", Name:"pi", UID:"95e02cf9-f518-439c-8209-5c12e163fc66", APIVersion:"batch/v1", ResourceVersion:"527", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: pi-4wprk
I1009 05:48:18.620] run.sh:33: Successful get jobs {{range.items}}{{.metadata.name}}:{{end}}: pi:
I1009 05:48:18.725] (B
I1009 05:48:18.726] FAIL!
I1009 05:48:18.726] Describe pods
I1009 05:48:18.726]   Expected Match: Name:
I1009 05:48:18.727]   Not found in:
I1009 05:48:18.727] Name:           pi-4wprk
I1009 05:48:18.727] Namespace:      namespace-1570600097-360
I1009 05:48:18.727] Priority:       0
... skipping 83 lines ...
I1009 05:48:21.882] Context "test" modified.
I1009 05:48:21.898] +++ [1009 05:48:21] Testing kubectl create filter
I1009 05:48:22.036] create.sh:30: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:48:22.335] (Bpod/selector-test-pod created
I1009 05:48:22.502] create.sh:34: Successful get pods selector-test-pod {{.metadata.labels.name}}: selector-test-pod
I1009 05:48:22.635] (BSuccessful
I1009 05:48:22.635] message:Error from server (NotFound): pods "selector-test-pod-dont-apply" not found
I1009 05:48:22.636] has:pods "selector-test-pod-dont-apply" not found
I1009 05:48:22.745] pod "selector-test-pod" deleted
I1009 05:48:22.782] +++ exit code: 0
I1009 05:48:22.837] Recording: run_kubectl_apply_deployments_tests
I1009 05:48:22.838] Running command: run_kubectl_apply_deployments_tests
I1009 05:48:22.872] 
... skipping 29 lines ...
W1009 05:48:26.185] I1009 05:48:26.088517   52917 event.go:262] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1570600102-21965", Name:"nginx", UID:"9ccece7d-8174-4162-91e8-668f33517142", APIVersion:"apps/v1", ResourceVersion:"600", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-8484dd655 to 3
W1009 05:48:26.186] I1009 05:48:26.093706   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600102-21965", Name:"nginx-8484dd655", UID:"32ee8167-c0c8-444a-830e-67d3a8859566", APIVersion:"apps/v1", ResourceVersion:"601", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-8484dd655-dkp9n
W1009 05:48:26.186] I1009 05:48:26.097292   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600102-21965", Name:"nginx-8484dd655", UID:"32ee8167-c0c8-444a-830e-67d3a8859566", APIVersion:"apps/v1", ResourceVersion:"601", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-8484dd655-rkr92
W1009 05:48:26.186] I1009 05:48:26.097647   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600102-21965", Name:"nginx-8484dd655", UID:"32ee8167-c0c8-444a-830e-67d3a8859566", APIVersion:"apps/v1", ResourceVersion:"601", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-8484dd655-4lwhq
I1009 05:48:26.287] apps.sh:148: Successful get deployment nginx {{.metadata.name}}: nginx
I1009 05:48:30.612] (BSuccessful
I1009 05:48:30.612] message:Error from server (Conflict): error when applying patch:
I1009 05:48:30.613] {"metadata":{"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"apps/v1\",\"kind\":\"Deployment\",\"metadata\":{\"annotations\":{},\"labels\":{\"name\":\"nginx\"},\"name\":\"nginx\",\"namespace\":\"namespace-1570600102-21965\",\"resourceVersion\":\"99\"},\"spec\":{\"replicas\":3,\"selector\":{\"matchLabels\":{\"name\":\"nginx2\"}},\"template\":{\"metadata\":{\"labels\":{\"name\":\"nginx2\"}},\"spec\":{\"containers\":[{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"nginx\",\"ports\":[{\"containerPort\":80}]}]}}}}\n"},"resourceVersion":"99"},"spec":{"selector":{"matchLabels":{"name":"nginx2"}},"template":{"metadata":{"labels":{"name":"nginx2"}}}}}
I1009 05:48:30.613] to:
I1009 05:48:30.614] Resource: "apps/v1, Resource=deployments", GroupVersionKind: "apps/v1, Kind=Deployment"
I1009 05:48:30.614] Name: "nginx", Namespace: "namespace-1570600102-21965"
I1009 05:48:30.616] Object: &{map["apiVersion":"apps/v1" "kind":"Deployment" "metadata":map["annotations":map["deployment.kubernetes.io/revision":"1" "kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"apps/v1\",\"kind\":\"Deployment\",\"metadata\":{\"annotations\":{},\"labels\":{\"name\":\"nginx\"},\"name\":\"nginx\",\"namespace\":\"namespace-1570600102-21965\"},\"spec\":{\"replicas\":3,\"selector\":{\"matchLabels\":{\"name\":\"nginx1\"}},\"template\":{\"metadata\":{\"labels\":{\"name\":\"nginx1\"}},\"spec\":{\"containers\":[{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"nginx\",\"ports\":[{\"containerPort\":80}]}]}}}}\n"] "creationTimestamp":"2019-10-09T05:48:26Z" "generation":'\x01' "labels":map["name":"nginx"] "name":"nginx" "namespace":"namespace-1570600102-21965" "resourceVersion":"613" "selfLink":"/apis/apps/v1/namespaces/namespace-1570600102-21965/deployments/nginx" "uid":"9ccece7d-8174-4162-91e8-668f33517142"] "spec":map["progressDeadlineSeconds":'\u0258' "replicas":'\x03' "revisionHistoryLimit":'\n' "selector":map["matchLabels":map["name":"nginx1"]] "strategy":map["rollingUpdate":map["maxSurge":"25%" "maxUnavailable":"25%"] "type":"RollingUpdate"] "template":map["metadata":map["creationTimestamp":<nil> "labels":map["name":"nginx1"]] "spec":map["containers":[map["image":"k8s.gcr.io/nginx:test-cmd" "imagePullPolicy":"IfNotPresent" "name":"nginx" "ports":[map["containerPort":'P' "protocol":"TCP"]] "resources":map[] "terminationMessagePath":"/dev/termination-log" "terminationMessagePolicy":"File"]] "dnsPolicy":"ClusterFirst" "restartPolicy":"Always" "schedulerName":"default-scheduler" "securityContext":map[] "terminationGracePeriodSeconds":'\x1e']]] "status":map["conditions":[map["lastTransitionTime":"2019-10-09T05:48:26Z" "lastUpdateTime":"2019-10-09T05:48:26Z" "message":"Deployment does not have minimum availability." "reason":"MinimumReplicasUnavailable" "status":"False" "type":"Available"] map["lastTransitionTime":"2019-10-09T05:48:26Z" "lastUpdateTime":"2019-10-09T05:48:26Z" "message":"ReplicaSet \"nginx-8484dd655\" is progressing." "reason":"ReplicaSetUpdated" "status":"True" "type":"Progressing"]] "observedGeneration":'\x01' "replicas":'\x03' "unavailableReplicas":'\x03' "updatedReplicas":'\x03']]}
I1009 05:48:30.617] for: "hack/testdata/deployment-label-change2.yaml": Operation cannot be fulfilled on deployments.apps "nginx": the object has been modified; please apply your changes to the latest version and try again
I1009 05:48:30.617] has:Error from server (Conflict)
W1009 05:48:30.718] I1009 05:48:27.037614   52917 horizontal.go:341] Horizontal Pod Autoscaler frontend has been deleted in namespace-1570600089-19647
I1009 05:48:35.956] deployment.apps/nginx configured
W1009 05:48:36.057] I1009 05:48:35.961700   52917 event.go:262] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1570600102-21965", Name:"nginx", UID:"9a512b87-206f-40a3-a9e9-4c169be91d3d", APIVersion:"apps/v1", ResourceVersion:"637", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-668b6c7744 to 3
W1009 05:48:36.059] I1009 05:48:35.966844   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600102-21965", Name:"nginx-668b6c7744", UID:"592021d5-9ab3-4cbb-a57a-ffa605fb689d", APIVersion:"apps/v1", ResourceVersion:"638", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-668b6c7744-pb8sn
W1009 05:48:36.060] I1009 05:48:35.970513   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600102-21965", Name:"nginx-668b6c7744", UID:"592021d5-9ab3-4cbb-a57a-ffa605fb689d", APIVersion:"apps/v1", ResourceVersion:"638", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-668b6c7744-rj7qt
W1009 05:48:36.060] I1009 05:48:35.971453   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600102-21965", Name:"nginx-668b6c7744", UID:"592021d5-9ab3-4cbb-a57a-ffa605fb689d", APIVersion:"apps/v1", ResourceVersion:"638", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-668b6c7744-7k2kg
... skipping 142 lines ...
I1009 05:48:44.924] +++ [1009 05:48:44] Creating namespace namespace-1570600124-125
I1009 05:48:45.029] namespace/namespace-1570600124-125 created
I1009 05:48:45.157] Context "test" modified.
I1009 05:48:45.173] +++ [1009 05:48:45] Testing kubectl get
I1009 05:48:45.308] get.sh:29: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:48:45.434] (BSuccessful
I1009 05:48:45.434] message:Error from server (NotFound): pods "abc" not found
I1009 05:48:45.434] has:pods "abc" not found
I1009 05:48:45.558] get.sh:37: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:48:45.676] (BSuccessful
I1009 05:48:45.677] message:Error from server (NotFound): pods "abc" not found
I1009 05:48:45.677] has:pods "abc" not found
I1009 05:48:45.805] get.sh:45: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:48:45.926] (BSuccessful
I1009 05:48:45.927] message:{
I1009 05:48:45.927]     "apiVersion": "v1",
I1009 05:48:45.927]     "items": [],
... skipping 23 lines ...
I1009 05:48:46.442] has not:No resources found
I1009 05:48:46.571] Successful
I1009 05:48:46.571] message:NAME
I1009 05:48:46.571] has not:No resources found
I1009 05:48:46.701] get.sh:73: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:48:46.839] (BSuccessful
I1009 05:48:46.840] message:error: the server doesn't have a resource type "foobar"
I1009 05:48:46.840] has not:No resources found
I1009 05:48:46.956] Successful
I1009 05:48:46.957] message:No resources found in namespace-1570600124-125 namespace.
I1009 05:48:46.957] has:No resources found
I1009 05:48:47.084] Successful
I1009 05:48:47.084] message:
I1009 05:48:47.084] has not:No resources found
I1009 05:48:47.201] Successful
I1009 05:48:47.201] message:No resources found in namespace-1570600124-125 namespace.
I1009 05:48:47.201] has:No resources found
I1009 05:48:47.322] get.sh:93: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:48:47.439] (BSuccessful
I1009 05:48:47.439] message:Error from server (NotFound): pods "abc" not found
I1009 05:48:47.440] has:pods "abc" not found
I1009 05:48:47.443] FAIL!
I1009 05:48:47.443] message:Error from server (NotFound): pods "abc" not found
I1009 05:48:47.443] has not:List
I1009 05:48:47.444] 99 /go/src/k8s.io/kubernetes/test/cmd/../../test/cmd/get.sh
I1009 05:48:47.606] Successful
I1009 05:48:47.607] message:I1009 05:48:47.537661   62740 loader.go:375] Config loaded from file:  /tmp/tmp.Mjy9Cwy53i/.kube/config
I1009 05:48:47.607] I1009 05:48:47.539364   62740 round_trippers.go:443] GET http://127.0.0.1:8080/version?timeout=32s 200 OK in 1 milliseconds
I1009 05:48:47.607] I1009 05:48:47.568644   62740 round_trippers.go:443] GET http://127.0.0.1:8080/api/v1/namespaces/default/pods 200 OK in 4 milliseconds
... skipping 660 lines ...
I1009 05:48:53.684] Successful
I1009 05:48:53.684] message:NAME    DATA   AGE
I1009 05:48:53.685] one     0      0s
I1009 05:48:53.685] three   0      0s
I1009 05:48:53.685] two     0      0s
I1009 05:48:53.685] STATUS    REASON          MESSAGE
I1009 05:48:53.685] Failure   InternalError   an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
I1009 05:48:53.685] has not:watch is only supported on individual resources
I1009 05:48:54.822] Successful
I1009 05:48:54.822] message:STATUS    REASON          MESSAGE
I1009 05:48:54.822] Failure   InternalError   an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
I1009 05:48:54.823] has not:watch is only supported on individual resources
I1009 05:48:54.832] +++ [1009 05:48:54] Creating namespace namespace-1570600134-4071
I1009 05:48:54.947] namespace/namespace-1570600134-4071 created
I1009 05:48:55.059] Context "test" modified.
I1009 05:48:55.211] get.sh:157: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:48:55.459] (Bpod/valid-pod created
... skipping 56 lines ...
I1009 05:48:55.588] }
I1009 05:48:55.723] get.sh:162: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1009 05:48:56.069] (B<no value>Successful
I1009 05:48:56.069] message:valid-pod:
I1009 05:48:56.069] has:valid-pod:
I1009 05:48:56.186] Successful
I1009 05:48:56.186] message:error: error executing jsonpath "{.missing}": Error executing template: missing is not found. Printing more information for debugging the template:
I1009 05:48:56.187] 	template was:
I1009 05:48:56.187] 		{.missing}
I1009 05:48:56.187] 	object given to jsonpath engine was:
I1009 05:48:56.188] 		map[string]interface {}{"apiVersion":"v1", "kind":"Pod", "metadata":map[string]interface {}{"creationTimestamp":"2019-10-09T05:48:55Z", "labels":map[string]interface {}{"name":"valid-pod"}, "name":"valid-pod", "namespace":"namespace-1570600134-4071", "resourceVersion":"716", "selfLink":"/api/v1/namespaces/namespace-1570600134-4071/pods/valid-pod", "uid":"627b1052-83e6-4083-83d3-8d4576183868"}, "spec":map[string]interface {}{"containers":[]interface {}{map[string]interface {}{"image":"k8s.gcr.io/serve_hostname", "imagePullPolicy":"Always", "name":"kubernetes-serve-hostname", "resources":map[string]interface {}{"limits":map[string]interface {}{"cpu":"1", "memory":"512Mi"}, "requests":map[string]interface {}{"cpu":"1", "memory":"512Mi"}}, "terminationMessagePath":"/dev/termination-log", "terminationMessagePolicy":"File"}}, "dnsPolicy":"ClusterFirst", "enableServiceLinks":true, "priority":0, "restartPolicy":"Always", "schedulerName":"default-scheduler", "securityContext":map[string]interface {}{}, "terminationGracePeriodSeconds":30}, "status":map[string]interface {}{"phase":"Pending", "qosClass":"Guaranteed"}}
I1009 05:48:56.188] has:missing is not found
W1009 05:48:56.288] error: error executing template "{{.missing}}": template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing"
I1009 05:48:56.390] Successful
I1009 05:48:56.391] message:Error executing template: template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing". Printing more information for debugging the template:
I1009 05:48:56.391] 	template was:
I1009 05:48:56.391] 		{{.missing}}
I1009 05:48:56.392] 	raw data was:
I1009 05:48:56.393] 		{"apiVersion":"v1","kind":"Pod","metadata":{"creationTimestamp":"2019-10-09T05:48:55Z","labels":{"name":"valid-pod"},"name":"valid-pod","namespace":"namespace-1570600134-4071","resourceVersion":"716","selfLink":"/api/v1/namespaces/namespace-1570600134-4071/pods/valid-pod","uid":"627b1052-83e6-4083-83d3-8d4576183868"},"spec":{"containers":[{"image":"k8s.gcr.io/serve_hostname","imagePullPolicy":"Always","name":"kubernetes-serve-hostname","resources":{"limits":{"cpu":"1","memory":"512Mi"},"requests":{"cpu":"1","memory":"512Mi"}},"terminationMessagePath":"/dev/termination-log","terminationMessagePolicy":"File"}],"dnsPolicy":"ClusterFirst","enableServiceLinks":true,"priority":0,"restartPolicy":"Always","schedulerName":"default-scheduler","securityContext":{},"terminationGracePeriodSeconds":30},"status":{"phase":"Pending","qosClass":"Guaranteed"}}
I1009 05:48:56.393] 	object given to template engine was:
I1009 05:48:56.394] 		map[apiVersion:v1 kind:Pod metadata:map[creationTimestamp:2019-10-09T05:48:55Z labels:map[name:valid-pod] name:valid-pod namespace:namespace-1570600134-4071 resourceVersion:716 selfLink:/api/v1/namespaces/namespace-1570600134-4071/pods/valid-pod uid:627b1052-83e6-4083-83d3-8d4576183868] spec:map[containers:[map[image:k8s.gcr.io/serve_hostname imagePullPolicy:Always name:kubernetes-serve-hostname resources:map[limits:map[cpu:1 memory:512Mi] requests:map[cpu:1 memory:512Mi]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:File]] dnsPolicy:ClusterFirst enableServiceLinks:true priority:0 restartPolicy:Always schedulerName:default-scheduler securityContext:map[] terminationGracePeriodSeconds:30] status:map[phase:Pending qosClass:Guaranteed]]
I1009 05:48:56.394] has:map has no entry for key "missing"
I1009 05:48:57.433] Successful
I1009 05:48:57.434] message:NAME        READY   STATUS    RESTARTS   AGE
I1009 05:48:57.435] valid-pod   0/1     Pending   0          1s
I1009 05:48:57.436] STATUS      REASON          MESSAGE
I1009 05:48:57.436] Failure     InternalError   an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
I1009 05:48:57.437] has:STATUS
I1009 05:48:57.438] Successful
I1009 05:48:57.438] message:NAME        READY   STATUS    RESTARTS   AGE
I1009 05:48:57.438] valid-pod   0/1     Pending   0          1s
I1009 05:48:57.438] STATUS      REASON          MESSAGE
I1009 05:48:57.438] Failure     InternalError   an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
I1009 05:48:57.439] has:valid-pod
I1009 05:48:58.570] Successful
I1009 05:48:58.571] message:pod/valid-pod
I1009 05:48:58.571] has not:STATUS
I1009 05:48:58.573] Successful
I1009 05:48:58.573] message:pod/valid-pod
... skipping 72 lines ...
I1009 05:48:59.725] status:
I1009 05:48:59.726]   phase: Pending
I1009 05:48:59.726]   qosClass: Guaranteed
I1009 05:48:59.726] ---
I1009 05:48:59.726] has:name: valid-pod
I1009 05:48:59.867] Successful
I1009 05:48:59.868] message:Error from server (NotFound): pods "invalid-pod" not found
I1009 05:48:59.868] has:"invalid-pod" not found
I1009 05:48:59.999] pod "valid-pod" deleted
I1009 05:49:00.196] get.sh:200: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:49:00.456] (Bpod/redis-master created
I1009 05:49:00.462] pod/valid-pod created
I1009 05:49:00.632] Successful
... skipping 35 lines ...
I1009 05:49:02.421] +++ command: run_kubectl_exec_pod_tests
I1009 05:49:02.439] +++ [1009 05:49:02] Creating namespace namespace-1570600142-21464
I1009 05:49:02.533] namespace/namespace-1570600142-21464 created
I1009 05:49:02.633] Context "test" modified.
I1009 05:49:02.648] +++ [1009 05:49:02] Testing kubectl exec POD COMMAND
I1009 05:49:02.772] Successful
I1009 05:49:02.772] message:Error from server (NotFound): pods "abc" not found
I1009 05:49:02.772] has:pods "abc" not found
I1009 05:49:03.003] pod/test-pod created
I1009 05:49:03.156] Successful
I1009 05:49:03.156] message:Error from server (BadRequest): pod test-pod does not have a host assigned
I1009 05:49:03.157] has not:pods "test-pod" not found
I1009 05:49:03.159] Successful
I1009 05:49:03.159] message:Error from server (BadRequest): pod test-pod does not have a host assigned
I1009 05:49:03.159] has not:pod or type/name must be specified
I1009 05:49:03.266] pod "test-pod" deleted
I1009 05:49:03.302] +++ exit code: 0
I1009 05:49:03.355] Recording: run_kubectl_exec_resource_name_tests
I1009 05:49:03.356] Running command: run_kubectl_exec_resource_name_tests
I1009 05:49:03.392] 
... skipping 2 lines ...
I1009 05:49:03.403] +++ command: run_kubectl_exec_resource_name_tests
I1009 05:49:03.420] +++ [1009 05:49:03] Creating namespace namespace-1570600143-2928
I1009 05:49:03.519] namespace/namespace-1570600143-2928 created
I1009 05:49:03.622] Context "test" modified.
I1009 05:49:03.635] +++ [1009 05:49:03] Testing kubectl exec TYPE/NAME COMMAND
I1009 05:49:03.772] Successful
I1009 05:49:03.773] message:error: the server doesn't have a resource type "foo"
I1009 05:49:03.773] has:error:
I1009 05:49:03.891] Successful
I1009 05:49:03.891] message:Error from server (NotFound): deployments.apps "bar" not found
I1009 05:49:03.892] has:"bar" not found
I1009 05:49:04.139] pod/test-pod created
I1009 05:49:04.401] replicaset.apps/frontend created
W1009 05:49:04.502] I1009 05:49:04.406476   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600143-2928", Name:"frontend", UID:"7a22aff0-4b2f-4317-bae6-f965a2117fa0", APIVersion:"apps/v1", ResourceVersion:"770", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-thn8h
W1009 05:49:04.503] I1009 05:49:04.411966   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600143-2928", Name:"frontend", UID:"7a22aff0-4b2f-4317-bae6-f965a2117fa0", APIVersion:"apps/v1", ResourceVersion:"770", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-k7hrn
W1009 05:49:04.503] I1009 05:49:04.412336   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600143-2928", Name:"frontend", UID:"7a22aff0-4b2f-4317-bae6-f965a2117fa0", APIVersion:"apps/v1", ResourceVersion:"770", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-slwfr
I1009 05:49:04.656] configmap/test-set-env-config created
I1009 05:49:04.804] Successful
I1009 05:49:04.804] message:error: cannot attach to *v1.ConfigMap: selector for *v1.ConfigMap not implemented
I1009 05:49:04.804] has:not implemented
I1009 05:49:04.928] Successful
I1009 05:49:04.929] message:Error from server (BadRequest): pod test-pod does not have a host assigned
I1009 05:49:04.929] has not:not found
I1009 05:49:04.932] Successful
I1009 05:49:04.933] message:Error from server (BadRequest): pod test-pod does not have a host assigned
I1009 05:49:04.934] has not:pod or type/name must be specified
I1009 05:49:05.076] Successful
I1009 05:49:05.076] message:Error from server (BadRequest): pod frontend-k7hrn does not have a host assigned
I1009 05:49:05.077] has not:not found
I1009 05:49:05.080] Successful
I1009 05:49:05.081] message:Error from server (BadRequest): pod frontend-k7hrn does not have a host assigned
I1009 05:49:05.082] has not:pod or type/name must be specified
I1009 05:49:05.210] pod "test-pod" deleted
I1009 05:49:05.321] replicaset.apps "frontend" deleted
I1009 05:49:05.441] configmap "test-set-env-config" deleted
I1009 05:49:05.479] +++ exit code: 0
I1009 05:49:05.539] Recording: run_create_secret_tests
I1009 05:49:05.540] Running command: run_create_secret_tests
I1009 05:49:05.581] 
I1009 05:49:05.585] +++ Running case: test-cmd.run_create_secret_tests 
I1009 05:49:05.589] +++ working dir: /go/src/k8s.io/kubernetes
I1009 05:49:05.593] +++ command: run_create_secret_tests
I1009 05:49:05.745] Successful
I1009 05:49:05.745] message:Error from server (NotFound): secrets "mysecret" not found
I1009 05:49:05.746] has:secrets "mysecret" not found
I1009 05:49:05.996] Successful
I1009 05:49:05.997] message:Error from server (NotFound): secrets "mysecret" not found
I1009 05:49:05.998] has:secrets "mysecret" not found
I1009 05:49:06.001] Successful
I1009 05:49:06.001] message:user-specified
I1009 05:49:06.002] has:user-specified
I1009 05:49:06.116] Successful
I1009 05:49:06.226] {"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"tester-update-cm","namespace":"default","selfLink":"/api/v1/namespaces/default/configmaps/tester-update-cm","uid":"c51db934-d120-41f2-8740-62654bb6ff7b","resourceVersion":"792","creationTimestamp":"2019-10-09T05:49:06Z"}}
... skipping 2 lines ...
I1009 05:49:06.506] has:uid
I1009 05:49:06.612] Successful
I1009 05:49:06.613] message:{"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"tester-update-cm","namespace":"default","selfLink":"/api/v1/namespaces/default/configmaps/tester-update-cm","uid":"c51db934-d120-41f2-8740-62654bb6ff7b","resourceVersion":"793","creationTimestamp":"2019-10-09T05:49:06Z"},"data":{"key1":"config1"}}
I1009 05:49:06.613] has:config1
I1009 05:49:06.723] {"kind":"Status","apiVersion":"v1","metadata":{},"status":"Success","details":{"name":"tester-update-cm","kind":"configmaps","uid":"c51db934-d120-41f2-8740-62654bb6ff7b"}}
I1009 05:49:06.855] Successful
I1009 05:49:06.856] message:Error from server (NotFound): configmaps "tester-update-cm" not found
I1009 05:49:06.856] has:configmaps "tester-update-cm" not found
I1009 05:49:06.881] +++ exit code: 0
I1009 05:49:06.937] Recording: run_kubectl_create_kustomization_directory_tests
I1009 05:49:06.938] Running command: run_kubectl_create_kustomization_directory_tests
I1009 05:49:06.980] 
I1009 05:49:06.987] +++ Running case: test-cmd.run_kubectl_create_kustomization_directory_tests 
... skipping 110 lines ...
I1009 05:49:11.004] valid-pod   0/1     Pending   0          0s
I1009 05:49:11.004] has:valid-pod
I1009 05:49:12.126] Successful
I1009 05:49:12.127] message:NAME        READY   STATUS    RESTARTS   AGE
I1009 05:49:12.127] valid-pod   0/1     Pending   0          1s
I1009 05:49:12.127] STATUS      REASON          MESSAGE
I1009 05:49:12.127] Failure     InternalError   an error on the server ("unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)") has prevented the request from succeeding
I1009 05:49:12.127] has:Timeout exceeded while reading body
I1009 05:49:12.252] Successful
I1009 05:49:12.252] message:NAME        READY   STATUS    RESTARTS   AGE
I1009 05:49:12.253] valid-pod   0/1     Pending   0          2s
I1009 05:49:12.253] has:valid-pod
I1009 05:49:12.360] Successful
I1009 05:49:12.360] message:error: Invalid timeout value. Timeout must be a single integer in seconds, or an integer followed by a corresponding time unit (e.g. 1s | 2m | 3h)
I1009 05:49:12.361] has:Invalid timeout value
I1009 05:49:12.468] pod "valid-pod" deleted
I1009 05:49:12.504] +++ exit code: 0
I1009 05:49:12.560] Recording: run_crd_tests
I1009 05:49:12.561] Running command: run_crd_tests
I1009 05:49:12.597] 
... skipping 158 lines ...
I1009 05:49:19.609] foo.company.com/test patched
I1009 05:49:19.777] crd.sh:236: Successful get foos/test {{.patched}}: value1
I1009 05:49:19.896] (Bfoo.company.com/test patched
I1009 05:49:20.053] crd.sh:238: Successful get foos/test {{.patched}}: value2
I1009 05:49:20.173] (Bfoo.company.com/test patched
I1009 05:49:20.353] crd.sh:240: Successful get foos/test {{.patched}}: <no value>
I1009 05:49:20.601] (B+++ [1009 05:49:20] "kubectl patch --local" returns error as expected for CustomResource: error: cannot apply strategic merge patch for company.com/v1, Kind=Foo locally, try --type merge
I1009 05:49:20.708] {
I1009 05:49:20.709]     "apiVersion": "company.com/v1",
I1009 05:49:20.709]     "kind": "Foo",
I1009 05:49:20.709]     "metadata": {
I1009 05:49:20.709]         "annotations": {
I1009 05:49:20.709]             "kubernetes.io/change-cause": "kubectl patch foos/test --server=http://127.0.0.1:8080 --match-server-version=true --patch={\"patched\":null} --type=merge --record=true"
... skipping 194 lines ...
I1009 05:49:45.124] (Bnamespace/non-native-resources created
I1009 05:49:45.297] bar.company.com/test created
I1009 05:49:45.399] crd.sh:455: Successful get bars {{len .items}}: 1
I1009 05:49:45.476] (Bnamespace "non-native-resources" deleted
I1009 05:49:50.793] crd.sh:458: Successful get bars {{len .items}}: 0
I1009 05:49:51.037] (Bcustomresourcedefinition.apiextensions.k8s.io "foos.company.com" deleted
W1009 05:49:51.138] Error from server (NotFound): namespaces "non-native-resources" not found
I1009 05:49:51.239] customresourcedefinition.apiextensions.k8s.io "bars.company.com" deleted
I1009 05:49:51.325] customresourcedefinition.apiextensions.k8s.io "resources.mygroup.example.com" deleted
I1009 05:49:51.476] customresourcedefinition.apiextensions.k8s.io "validfoos.company.com" deleted
I1009 05:49:51.528] +++ exit code: 0
I1009 05:49:51.581] Recording: run_cmd_with_img_tests
I1009 05:49:51.582] Running command: run_cmd_with_img_tests
... skipping 5 lines ...
I1009 05:49:51.763] namespace/namespace-1570600191-3290 created
I1009 05:49:51.889] Context "test" modified.
I1009 05:49:51.903] +++ [1009 05:49:51] Testing cmd with image
W1009 05:49:52.020] kubectl run --generator=deployment/apps.v1 is DEPRECATED and will be removed in a future version. Use kubectl run --generator=run-pod/v1 or kubectl create instead.
W1009 05:49:52.046] I1009 05:49:52.045624   52917 event.go:262] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1570600191-3290", Name:"test1", UID:"4a1e86a7-71c0-4a94-99b9-9a91ebd6a5cc", APIVersion:"apps/v1", ResourceVersion:"951", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set test1-6cdffdb5b8 to 1
W1009 05:49:52.047] W1009 05:49:52.046595   49360 cacher.go:162] Terminating all watchers from cacher *unstructured.Unstructured
W1009 05:49:52.050] E1009 05:49:52.049574   52917 reflector.go:307] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:52.058] I1009 05:49:52.057065   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600191-3290", Name:"test1-6cdffdb5b8", UID:"64f0bb77-49c7-405d-9b72-f5e3fc8b9275", APIVersion:"apps/v1", ResourceVersion:"952", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test1-6cdffdb5b8-vh9zj
I1009 05:49:52.158] Successful
I1009 05:49:52.159] message:deployment.apps/test1 created
I1009 05:49:52.159] has:deployment.apps/test1 created
I1009 05:49:52.236] deployment.apps "test1" deleted
W1009 05:49:52.337] W1009 05:49:52.200875   49360 cacher.go:162] Terminating all watchers from cacher *unstructured.Unstructured
W1009 05:49:52.338] E1009 05:49:52.203384   52917 reflector.go:307] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:52.339] W1009 05:49:52.339220   49360 cacher.go:162] Terminating all watchers from cacher *unstructured.Unstructured
W1009 05:49:52.342] E1009 05:49:52.341868   52917 reflector.go:307] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:49:52.443] Successful
I1009 05:49:52.444] message:error: Invalid image name "InvalidImageName": invalid reference format
I1009 05:49:52.444] has:error: Invalid image name "InvalidImageName": invalid reference format
I1009 05:49:52.444] +++ exit code: 0
I1009 05:49:52.457] +++ [1009 05:49:52] Testing recursive resources
I1009 05:49:52.468] +++ [1009 05:49:52] Creating namespace namespace-1570600192-10006
W1009 05:49:52.569] W1009 05:49:52.491988   49360 cacher.go:162] Terminating all watchers from cacher *unstructured.Unstructured
W1009 05:49:52.570] E1009 05:49:52.494311   52917 reflector.go:307] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:49:52.670] namespace/namespace-1570600192-10006 created
I1009 05:49:52.696] Context "test" modified.
I1009 05:49:52.844] generic-resources.sh:202: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:49:53.295] (Bgeneric-resources.sh:206: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:49:53.299] (BSuccessful
I1009 05:49:53.300] message:pod/busybox0 created
I1009 05:49:53.300] pod/busybox1 created
I1009 05:49:53.300] error: error validating "hack/testdata/recursive/pod/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I1009 05:49:53.300] has:error validating data: kind not set
W1009 05:49:53.401] E1009 05:49:53.052336   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:53.403] E1009 05:49:53.206988   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:53.403] E1009 05:49:53.344141   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:53.497] E1009 05:49:53.496845   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:49:53.599] generic-resources.sh:211: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:49:53.676] (Bgeneric-resources.sh:220: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: busybox:busybox:
I1009 05:49:53.680] (BSuccessful
I1009 05:49:53.680] message:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1009 05:49:53.681] has:Object 'Kind' is missing
I1009 05:49:53.806] generic-resources.sh:227: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:49:54.211] (Bgeneric-resources.sh:231: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
I1009 05:49:54.216] (BSuccessful
I1009 05:49:54.217] message:pod/busybox0 replaced
I1009 05:49:54.217] pod/busybox1 replaced
I1009 05:49:54.218] error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I1009 05:49:54.218] has:error validating data: kind not set
W1009 05:49:54.320] E1009 05:49:54.054090   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:54.320] E1009 05:49:54.210209   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:54.347] E1009 05:49:54.346340   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:49:54.448] generic-resources.sh:236: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:49:54.494] (BSuccessful
I1009 05:49:54.495] message:Name:         busybox0
I1009 05:49:54.495] Namespace:    namespace-1570600192-10006
I1009 05:49:54.496] Priority:     0
I1009 05:49:54.496] Node:         <none>
... skipping 154 lines ...
I1009 05:49:54.511] QoS Class:        BestEffort
I1009 05:49:54.511] Node-Selectors:   <none>
I1009 05:49:54.511] Tolerations:      <none>
I1009 05:49:54.511] Events:           <none>
I1009 05:49:54.512] unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1009 05:49:54.512] has:Object 'Kind' is missing
W1009 05:49:54.613] E1009 05:49:54.499217   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:49:54.714] generic-resources.sh:246: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:49:54.904] (Bgeneric-resources.sh:250: Successful get pods {{range.items}}{{.metadata.annotations.annotatekey}}:{{end}}: annotatevalue:annotatevalue:
I1009 05:49:54.907] (BSuccessful
I1009 05:49:54.907] message:pod/busybox0 annotated
I1009 05:49:54.907] pod/busybox1 annotated
I1009 05:49:54.908] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1009 05:49:54.908] has:Object 'Kind' is missing
I1009 05:49:55.057] generic-resources.sh:255: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:49:55.514] (Bgeneric-resources.sh:259: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
I1009 05:49:55.518] (BSuccessful
I1009 05:49:55.518] message:Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
I1009 05:49:55.518] pod/busybox0 configured
I1009 05:49:55.519] Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
I1009 05:49:55.519] pod/busybox1 configured
I1009 05:49:55.519] error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I1009 05:49:55.519] has:error validating data: kind not set
W1009 05:49:55.620] E1009 05:49:55.055946   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:55.621] E1009 05:49:55.212790   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:55.621] E1009 05:49:55.348476   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:55.621] E1009 05:49:55.501171   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:55.642] I1009 05:49:55.641653   52917 namespace_controller.go:185] Namespace has been deleted non-native-resources
I1009 05:49:55.743] generic-resources.sh:265: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:49:55.877] (Bdeployment.apps/nginx created
W1009 05:49:55.979] I1009 05:49:55.883535   52917 event.go:262] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1570600192-10006", Name:"nginx", UID:"ad8accef-f187-4715-8fbb-6000b29f5e80", APIVersion:"apps/v1", ResourceVersion:"978", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-f87d999f7 to 3
W1009 05:49:55.980] I1009 05:49:55.887826   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600192-10006", Name:"nginx-f87d999f7", UID:"49ed4442-0d76-4f28-a55f-eed30868682f", APIVersion:"apps/v1", ResourceVersion:"979", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-f87d999f7-9rqkg
W1009 05:49:55.980] I1009 05:49:55.890401   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600192-10006", Name:"nginx-f87d999f7", UID:"49ed4442-0d76-4f28-a55f-eed30868682f", APIVersion:"apps/v1", ResourceVersion:"979", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-f87d999f7-lxptl
W1009 05:49:55.981] I1009 05:49:55.892430   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600192-10006", Name:"nginx-f87d999f7", UID:"49ed4442-0d76-4f28-a55f-eed30868682f", APIVersion:"apps/v1", ResourceVersion:"979", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-f87d999f7-js5m6
W1009 05:49:56.058] E1009 05:49:56.057679   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:49:56.159] generic-resources.sh:269: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx:
I1009 05:49:56.164] (Bgeneric-resources.sh:270: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I1009 05:49:56.401] (Bgeneric-resources.sh:274: Successful get deployment nginx {{ .apiVersion }}: apps/v1
I1009 05:49:56.405] (BSuccessful
I1009 05:49:56.405] message:apiVersion: extensions/v1beta1
I1009 05:49:56.405] kind: Deployment
... skipping 34 lines ...
I1009 05:49:56.410]       restartPolicy: Always
I1009 05:49:56.410]       schedulerName: default-scheduler
I1009 05:49:56.410]       securityContext: {}
I1009 05:49:56.410]       terminationGracePeriodSeconds: 30
I1009 05:49:56.410] status: {}
I1009 05:49:56.410] has:extensions/v1beta1
W1009 05:49:56.511] E1009 05:49:56.214670   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:56.512] kubectl convert is DEPRECATED and will be removed in a future version.
W1009 05:49:56.512] In order to convert, kubectl apply the object to the cluster, then kubectl get at the desired version.
W1009 05:49:56.512] E1009 05:49:56.351042   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:56.513] E1009 05:49:56.503106   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:49:56.613] deployment.apps "nginx" deleted
I1009 05:49:56.663] generic-resources.sh:281: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:49:56.909] (Bgeneric-resources.sh:285: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:49:56.913] (BSuccessful
I1009 05:49:56.913] message:kubectl convert is DEPRECATED and will be removed in a future version.
I1009 05:49:56.913] In order to convert, kubectl apply the object to the cluster, then kubectl get at the desired version.
I1009 05:49:56.913] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1009 05:49:56.914] has:Object 'Kind' is missing
I1009 05:49:57.045] generic-resources.sh:290: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:49:57.166] (BSuccessful
I1009 05:49:57.167] message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1009 05:49:57.167] has:busybox0:busybox1:
I1009 05:49:57.172] Successful
I1009 05:49:57.172] message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1009 05:49:57.172] has:Object 'Kind' is missing
W1009 05:49:57.274] E1009 05:49:57.059937   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:57.275] E1009 05:49:57.216582   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:57.354] E1009 05:49:57.353586   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:49:57.455] generic-resources.sh:299: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:49:57.456] (Bpod/busybox0 labeled
I1009 05:49:57.456] pod/busybox1 labeled
I1009 05:49:57.456] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
W1009 05:49:57.557] E1009 05:49:57.505382   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:49:57.658] generic-resources.sh:304: Successful get pods {{range.items}}{{.metadata.labels.mylabel}}:{{end}}: myvalue:myvalue:
I1009 05:49:57.658] (BSuccessful
I1009 05:49:57.658] message:pod/busybox0 labeled
I1009 05:49:57.659] pod/busybox1 labeled
I1009 05:49:57.659] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1009 05:49:57.659] has:Object 'Kind' is missing
I1009 05:49:57.693] generic-resources.sh:309: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:49:57.819] (Bpod/busybox0 patched
I1009 05:49:57.819] pod/busybox1 patched
I1009 05:49:57.820] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1009 05:49:57.949] generic-resources.sh:314: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: prom/busybox:prom/busybox:
I1009 05:49:57.954] (BSuccessful
I1009 05:49:57.955] message:pod/busybox0 patched
I1009 05:49:57.955] pod/busybox1 patched
I1009 05:49:57.956] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1009 05:49:57.956] has:Object 'Kind' is missing
W1009 05:49:58.063] E1009 05:49:58.062259   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:49:58.164] generic-resources.sh:319: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:49:58.340] (Bgeneric-resources.sh:323: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:49:58.345] (BSuccessful
I1009 05:49:58.345] message:warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
I1009 05:49:58.346] pod "busybox0" force deleted
I1009 05:49:58.346] pod "busybox1" force deleted
I1009 05:49:58.346] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1009 05:49:58.346] has:Object 'Kind' is missing
W1009 05:49:58.447] E1009 05:49:58.218381   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:58.448] E1009 05:49:58.356432   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:58.508] E1009 05:49:58.507558   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:49:58.609] generic-resources.sh:328: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:49:58.702] (Breplicationcontroller/busybox0 created
I1009 05:49:58.708] replicationcontroller/busybox1 created
W1009 05:49:58.808] I1009 05:49:58.706301   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1570600192-10006", Name:"busybox0", UID:"2dc6e6c3-e0af-416b-820e-7512b00744a7", APIVersion:"v1", ResourceVersion:"1009", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox0-vgpkh
W1009 05:49:58.809] error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
W1009 05:49:58.809] I1009 05:49:58.711554   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1570600192-10006", Name:"busybox1", UID:"680978a3-8a8a-418b-8f7e-ef8f48e39f16", APIVersion:"v1", ResourceVersion:"1011", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox1-cvspg
I1009 05:49:58.910] generic-resources.sh:332: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:49:59.014] (Bgeneric-resources.sh:337: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:49:59.172] (Bgeneric-resources.sh:338: Successful get rc busybox0 {{.spec.replicas}}: 1
I1009 05:49:59.334] (Bgeneric-resources.sh:339: Successful get rc busybox1 {{.spec.replicas}}: 1
I1009 05:49:59.649] (Bgeneric-resources.sh:344: Successful get hpa busybox0 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 80
I1009 05:49:59.810] (Bgeneric-resources.sh:345: Successful get hpa busybox1 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 80
I1009 05:49:59.816] (BSuccessful
I1009 05:49:59.817] message:horizontalpodautoscaler.autoscaling/busybox0 autoscaled
I1009 05:49:59.817] horizontalpodautoscaler.autoscaling/busybox1 autoscaled
I1009 05:49:59.818] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1009 05:49:59.818] has:Object 'Kind' is missing
W1009 05:49:59.919] E1009 05:49:59.065552   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:59.919] E1009 05:49:59.220468   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:59.919] E1009 05:49:59.358563   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:49:59.920] E1009 05:49:59.510018   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:00.020] horizontalpodautoscaler.autoscaling "busybox0" deleted
I1009 05:50:00.105] horizontalpodautoscaler.autoscaling "busybox1" deleted
W1009 05:50:00.206] E1009 05:50:00.067716   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:00.225] E1009 05:50:00.223328   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:00.328] generic-resources.sh:353: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:50:00.413] (Bgeneric-resources.sh:354: Successful get rc busybox0 {{.spec.replicas}}: 1
I1009 05:50:00.569] (Bgeneric-resources.sh:355: Successful get rc busybox1 {{.spec.replicas}}: 1
I1009 05:50:00.868] (Bgeneric-resources.sh:359: Successful get service busybox0 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80
I1009 05:50:01.014] (Bgeneric-resources.sh:360: Successful get service busybox1 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80
I1009 05:50:01.017] (BSuccessful
I1009 05:50:01.018] message:service/busybox0 exposed
I1009 05:50:01.018] service/busybox1 exposed
I1009 05:50:01.019] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1009 05:50:01.019] has:Object 'Kind' is missing
W1009 05:50:01.119] E1009 05:50:00.360598   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:01.120] E1009 05:50:00.511795   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:01.121] E1009 05:50:01.069616   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:01.222] generic-resources.sh:366: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:50:01.300] (Bgeneric-resources.sh:367: Successful get rc busybox0 {{.spec.replicas}}: 1
I1009 05:50:01.440] (Bgeneric-resources.sh:368: Successful get rc busybox1 {{.spec.replicas}}: 1
I1009 05:50:01.741] (Bgeneric-resources.sh:372: Successful get rc busybox0 {{.spec.replicas}}: 2
I1009 05:50:01.888] (Bgeneric-resources.sh:373: Successful get rc busybox1 {{.spec.replicas}}: 2
I1009 05:50:01.893] (BSuccessful
I1009 05:50:01.894] message:replicationcontroller/busybox0 scaled
I1009 05:50:01.894] replicationcontroller/busybox1 scaled
I1009 05:50:01.895] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1009 05:50:01.895] has:Object 'Kind' is missing
W1009 05:50:01.996] E1009 05:50:01.225559   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:01.997] E1009 05:50:01.364009   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:01.997] E1009 05:50:01.514433   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:01.998] I1009 05:50:01.578571   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1570600192-10006", Name:"busybox0", UID:"2dc6e6c3-e0af-416b-820e-7512b00744a7", APIVersion:"v1", ResourceVersion:"1031", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox0-fwdtd
W1009 05:50:01.998] I1009 05:50:01.590052   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1570600192-10006", Name:"busybox1", UID:"680978a3-8a8a-418b-8f7e-ef8f48e39f16", APIVersion:"v1", ResourceVersion:"1035", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox1-n8g5h
W1009 05:50:02.072] E1009 05:50:02.071809   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:02.173] generic-resources.sh:378: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1009 05:50:02.318] (Bgeneric-resources.sh:382: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:50:02.322] (BSuccessful
I1009 05:50:02.323] message:warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
I1009 05:50:02.323] replicationcontroller "busybox0" force deleted
I1009 05:50:02.323] replicationcontroller "busybox1" force deleted
I1009 05:50:02.324] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1009 05:50:02.324] has:Object 'Kind' is missing
W1009 05:50:02.424] E1009 05:50:02.227591   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:02.425] E1009 05:50:02.367397   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:02.518] E1009 05:50:02.516788   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:02.618] generic-resources.sh:387: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:50:02.716] (Bdeployment.apps/nginx1-deployment created
I1009 05:50:02.720] deployment.apps/nginx0-deployment created
W1009 05:50:02.822] error: error validating "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
W1009 05:50:02.823] I1009 05:50:02.721081   52917 event.go:262] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1570600192-10006", Name:"nginx1-deployment", UID:"c2105c43-462a-4fd9-bb55-78e6632c424c", APIVersion:"apps/v1", ResourceVersion:"1051", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx1-deployment-7bdbbfb5cf to 2
W1009 05:50:02.823] I1009 05:50:02.729915   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600192-10006", Name:"nginx1-deployment-7bdbbfb5cf", UID:"45db2a78-4b7b-42bd-9c93-e29a1d9f3de1", APIVersion:"apps/v1", ResourceVersion:"1052", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx1-deployment-7bdbbfb5cf-tldhb
W1009 05:50:02.823] I1009 05:50:02.748966   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600192-10006", Name:"nginx1-deployment-7bdbbfb5cf", UID:"45db2a78-4b7b-42bd-9c93-e29a1d9f3de1", APIVersion:"apps/v1", ResourceVersion:"1052", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx1-deployment-7bdbbfb5cf-5dn6r
W1009 05:50:02.824] I1009 05:50:02.749799   52917 event.go:262] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1570600192-10006", Name:"nginx0-deployment", UID:"e508fe53-daee-4aeb-9ba1-3579d6b7e1ec", APIVersion:"apps/v1", ResourceVersion:"1053", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx0-deployment-57c6bff7f6 to 2
W1009 05:50:02.824] I1009 05:50:02.756296   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600192-10006", Name:"nginx0-deployment-57c6bff7f6", UID:"027187d7-9854-46a6-8597-340ebba877dd", APIVersion:"apps/v1", ResourceVersion:"1058", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx0-deployment-57c6bff7f6-5qs9x
W1009 05:50:02.824] I1009 05:50:02.762636   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1570600192-10006", Name:"nginx0-deployment-57c6bff7f6", UID:"027187d7-9854-46a6-8597-340ebba877dd", APIVersion:"apps/v1", ResourceVersion:"1058", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx0-deployment-57c6bff7f6-sfz79
I1009 05:50:02.926] generic-resources.sh:391: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx0-deployment:nginx1-deployment:
I1009 05:50:03.063] (Bgeneric-resources.sh:392: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:k8s.gcr.io/nginx:1.7.9:
I1009 05:50:03.353] (Bgeneric-resources.sh:396: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:k8s.gcr.io/nginx:1.7.9:
I1009 05:50:03.358] (BSuccessful
I1009 05:50:03.359] message:deployment.apps/nginx1-deployment skipped rollback (current template already matches revision 1)
I1009 05:50:03.360] deployment.apps/nginx0-deployment skipped rollback (current template already matches revision 1)
I1009 05:50:03.360] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I1009 05:50:03.360] has:Object 'Kind' is missing
W1009 05:50:03.461] E1009 05:50:03.074131   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:03.462] E1009 05:50:03.229398   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:03.462] E1009 05:50:03.369443   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:03.521] E1009 05:50:03.520434   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:03.622] deployment.apps/nginx1-deployment paused
I1009 05:50:03.623] deployment.apps/nginx0-deployment paused
I1009 05:50:03.657] generic-resources.sh:404: Successful get deployment {{range.items}}{{.spec.paused}}:{{end}}: true:true:
I1009 05:50:03.663] (BSuccessful
I1009 05:50:03.664] message:unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I1009 05:50:03.665] has:Object 'Kind' is missing
I1009 05:50:03.800] deployment.apps/nginx1-deployment resumed
I1009 05:50:03.806] deployment.apps/nginx0-deployment resumed
I1009 05:50:03.975] generic-resources.sh:410: Successful get deployment {{range.items}}{{.spec.paused}}:{{end}}: <no value>:<no value>:
I1009 05:50:03.981] (BSuccessful
I1009 05:50:03.981] message:unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I1009 05:50:03.981] has:Object 'Kind' is missing
W1009 05:50:04.082] E1009 05:50:04.076267   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:04.184] Successful
I1009 05:50:04.185] message:deployment.apps/nginx1-deployment 
I1009 05:50:04.185] REVISION  CHANGE-CAUSE
I1009 05:50:04.186] 1         <none>
I1009 05:50:04.186] 
I1009 05:50:04.186] deployment.apps/nginx0-deployment 
I1009 05:50:04.186] REVISION  CHANGE-CAUSE
I1009 05:50:04.186] 1         <none>
I1009 05:50:04.186] 
I1009 05:50:04.187] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I1009 05:50:04.188] has:nginx0-deployment
I1009 05:50:04.188] Successful
I1009 05:50:04.188] message:deployment.apps/nginx1-deployment 
I1009 05:50:04.188] REVISION  CHANGE-CAUSE
I1009 05:50:04.189] 1         <none>
I1009 05:50:04.189] 
I1009 05:50:04.189] deployment.apps/nginx0-deployment 
I1009 05:50:04.189] REVISION  CHANGE-CAUSE
I1009 05:50:04.189] 1         <none>
I1009 05:50:04.189] 
I1009 05:50:04.190] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I1009 05:50:04.190] has:nginx1-deployment
I1009 05:50:04.190] Successful
I1009 05:50:04.190] message:deployment.apps/nginx1-deployment 
I1009 05:50:04.190] REVISION  CHANGE-CAUSE
I1009 05:50:04.190] 1         <none>
I1009 05:50:04.190] 
I1009 05:50:04.190] deployment.apps/nginx0-deployment 
I1009 05:50:04.191] REVISION  CHANGE-CAUSE
I1009 05:50:04.191] 1         <none>
I1009 05:50:04.191] 
I1009 05:50:04.191] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I1009 05:50:04.191] has:Object 'Kind' is missing
I1009 05:50:04.274] deployment.apps "nginx1-deployment" force deleted
I1009 05:50:04.285] deployment.apps "nginx0-deployment" force deleted
W1009 05:50:04.387] E1009 05:50:04.231513   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:04.387] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W1009 05:50:04.388] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
W1009 05:50:04.388] E1009 05:50:04.371491   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:04.523] E1009 05:50:04.522746   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:05.080] E1009 05:50:05.079162   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:05.235] E1009 05:50:05.234039   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:05.374] E1009 05:50:05.373821   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:05.476] generic-resources.sh:426: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:50:05.744] (Breplicationcontroller/busybox0 created
I1009 05:50:05.752] replicationcontroller/busybox1 created
W1009 05:50:05.853] E1009 05:50:05.524951   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:05.854] I1009 05:50:05.750888   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1570600192-10006", Name:"busybox0", UID:"053d2fa2-fd57-4d57-ad1c-db36d1776dfc", APIVersion:"v1", ResourceVersion:"1102", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox0-czlbt
W1009 05:50:05.854] error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
W1009 05:50:05.855] I1009 05:50:05.765510   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1570600192-10006", Name:"busybox1", UID:"a1875157-2e7f-4ed1-b43b-1eab843d4b80", APIVersion:"v1", ResourceVersion:"1103", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox1-4lbhj
W1009 05:50:05.862] I1009 05:50:05.861644   52917 shared_informer.go:197] Waiting for caches to sync for garbage collector
W1009 05:50:05.862] I1009 05:50:05.861780   52917 shared_informer.go:204] Caches are synced for garbage collector 
W1009 05:50:05.902] I1009 05:50:05.901567   52917 shared_informer.go:197] Waiting for caches to sync for resource quota
W1009 05:50:05.903] I1009 05:50:05.903383   52917 shared_informer.go:204] Caches are synced for resource quota 
I1009 05:50:06.004] generic-resources.sh:430: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
... skipping 4 lines ...
I1009 05:50:06.070] has:no rollbacker has been implemented for "ReplicationController"
I1009 05:50:06.074] Successful
I1009 05:50:06.074] message:no rollbacker has been implemented for "ReplicationController"
I1009 05:50:06.074] no rollbacker has been implemented for "ReplicationController"
I1009 05:50:06.075] unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1009 05:50:06.075] has:Object 'Kind' is missing
W1009 05:50:06.176] E1009 05:50:06.082153   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:06.236] E1009 05:50:06.235837   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:06.337] Successful
I1009 05:50:06.339] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1009 05:50:06.340] error: replicationcontrollers "busybox0" pausing is not supported
I1009 05:50:06.340] error: replicationcontrollers "busybox1" pausing is not supported
I1009 05:50:06.340] has:Object 'Kind' is missing
I1009 05:50:06.340] Successful
I1009 05:50:06.341] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1009 05:50:06.341] error: replicationcontrollers "busybox0" pausing is not supported
I1009 05:50:06.341] error: replicationcontrollers "busybox1" pausing is not supported
I1009 05:50:06.341] has:replicationcontrollers "busybox0" pausing is not supported
I1009 05:50:06.342] Successful
I1009 05:50:06.342] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1009 05:50:06.343] error: replicationcontrollers "busybox0" pausing is not supported
I1009 05:50:06.343] error: replicationcontrollers "busybox1" pausing is not supported
I1009 05:50:06.343] has:replicationcontrollers "busybox1" pausing is not supported
I1009 05:50:06.351] Successful
I1009 05:50:06.352] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1009 05:50:06.352] error: replicationcontrollers "busybox0" resuming is not supported
I1009 05:50:06.352] error: replicationcontrollers "busybox1" resuming is not supported
I1009 05:50:06.353] has:Object 'Kind' is missing
I1009 05:50:06.357] Successful
I1009 05:50:06.358] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1009 05:50:06.358] error: replicationcontrollers "busybox0" resuming is not supported
I1009 05:50:06.358] error: replicationcontrollers "busybox1" resuming is not supported
I1009 05:50:06.358] has:replicationcontrollers "busybox0" resuming is not supported
I1009 05:50:06.362] Successful
I1009 05:50:06.363] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1009 05:50:06.363] error: replicationcontrollers "busybox0" resuming is not supported
I1009 05:50:06.363] error: replicationcontrollers "busybox1" resuming is not supported
I1009 05:50:06.363] has:replicationcontrollers "busybox1" resuming is not supported
W1009 05:50:06.464] E1009 05:50:06.375454   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:06.464] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W1009 05:50:06.472] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
W1009 05:50:06.527] E1009 05:50:06.526447   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:06.628] replicationcontroller "busybox0" force deleted
I1009 05:50:06.628] replicationcontroller "busybox1" force deleted
W1009 05:50:07.085] E1009 05:50:07.084158   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:07.238] E1009 05:50:07.237784   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:07.378] E1009 05:50:07.377889   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:07.487] Recording: run_namespace_tests
I1009 05:50:07.487] Running command: run_namespace_tests
I1009 05:50:07.536] 
I1009 05:50:07.541] +++ Running case: test-cmd.run_namespace_tests 
I1009 05:50:07.546] +++ working dir: /go/src/k8s.io/kubernetes
I1009 05:50:07.551] +++ command: run_namespace_tests
I1009 05:50:07.567] +++ [1009 05:50:07] Testing kubectl(v1:namespaces)
W1009 05:50:07.668] E1009 05:50:07.528501   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:07.769] namespace/my-namespace created
I1009 05:50:07.816] core.sh:1308: Successful get namespaces/my-namespace {{.metadata.name}}: my-namespace
I1009 05:50:07.923] (Bnamespace "my-namespace" deleted
W1009 05:50:08.086] E1009 05:50:08.086031   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:08.240] E1009 05:50:08.239915   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:08.381] E1009 05:50:08.380540   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:08.531] E1009 05:50:08.530267   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:09.089] E1009 05:50:09.088365   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:09.243] E1009 05:50:09.242343   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:09.383] E1009 05:50:09.382860   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:09.533] E1009 05:50:09.532275   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:10.091] E1009 05:50:10.090696   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:10.245] E1009 05:50:10.244693   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:10.385] E1009 05:50:10.385021   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:10.535] E1009 05:50:10.534437   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:11.093] E1009 05:50:11.092817   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:11.248] E1009 05:50:11.247039   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:11.388] E1009 05:50:11.387912   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:11.537] E1009 05:50:11.536494   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:12.095] E1009 05:50:12.094654   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:12.250] E1009 05:50:12.249173   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:12.391] E1009 05:50:12.390115   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:12.539] E1009 05:50:12.538641   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:13.063] namespace/my-namespace condition met
W1009 05:50:13.165] E1009 05:50:13.096431   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:13.252] E1009 05:50:13.251479   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:13.354] Successful
I1009 05:50:13.354] message:Error from server (NotFound): namespaces "my-namespace" not found
I1009 05:50:13.354] has: not found
I1009 05:50:13.354] namespace/my-namespace created
W1009 05:50:13.455] E1009 05:50:13.392131   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:13.542] E1009 05:50:13.541290   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:13.644] core.sh:1317: Successful get namespaces/my-namespace {{.metadata.name}}: my-namespace
I1009 05:50:13.845] (BSuccessful
I1009 05:50:13.846] message:warning: deleting cluster-scoped resources, not scoped to the provided namespace
I1009 05:50:13.847] namespace "kube-node-lease" deleted
I1009 05:50:13.847] namespace "my-namespace" deleted
I1009 05:50:13.847] namespace "namespace-1570600024-30424" deleted
... skipping 27 lines ...
I1009 05:50:13.856] namespace "namespace-1570600148-6280" deleted
I1009 05:50:13.856] namespace "namespace-1570600149-6860" deleted
I1009 05:50:13.856] namespace "namespace-1570600152-16349" deleted
I1009 05:50:13.857] namespace "namespace-1570600154-22137" deleted
I1009 05:50:13.857] namespace "namespace-1570600191-3290" deleted
I1009 05:50:13.857] namespace "namespace-1570600192-10006" deleted
I1009 05:50:13.857] Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted
I1009 05:50:13.858] Error from server (Forbidden): namespaces "kube-public" is forbidden: this namespace may not be deleted
I1009 05:50:13.858] Error from server (Forbidden): namespaces "kube-system" is forbidden: this namespace may not be deleted
I1009 05:50:13.858] has:warning: deleting cluster-scoped resources
I1009 05:50:13.859] Successful
I1009 05:50:13.859] message:warning: deleting cluster-scoped resources, not scoped to the provided namespace
I1009 05:50:13.859] namespace "kube-node-lease" deleted
I1009 05:50:13.860] namespace "my-namespace" deleted
I1009 05:50:13.860] namespace "namespace-1570600024-30424" deleted
... skipping 27 lines ...
I1009 05:50:13.870] namespace "namespace-1570600148-6280" deleted
I1009 05:50:13.870] namespace "namespace-1570600149-6860" deleted
I1009 05:50:13.870] namespace "namespace-1570600152-16349" deleted
I1009 05:50:13.871] namespace "namespace-1570600154-22137" deleted
I1009 05:50:13.871] namespace "namespace-1570600191-3290" deleted
I1009 05:50:13.871] namespace "namespace-1570600192-10006" deleted
I1009 05:50:13.872] Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted
I1009 05:50:13.872] Error from server (Forbidden): namespaces "kube-public" is forbidden: this namespace may not be deleted
I1009 05:50:13.872] Error from server (Forbidden): namespaces "kube-system" is forbidden: this namespace may not be deleted
I1009 05:50:13.872] has:namespace "my-namespace" deleted
I1009 05:50:14.016] core.sh:1329: Successful get namespaces {{range.items}}{{ if eq .metadata.name \"other\" }}found{{end}}{{end}}:: :
I1009 05:50:14.143] namespace/other created
W1009 05:50:14.244] E1009 05:50:14.098507   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:14.255] E1009 05:50:14.253590   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:14.356] core.sh:1333: Successful get namespaces/other {{.metadata.name}}: other
I1009 05:50:14.478] core.sh:1337: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:50:14.763] pod/valid-pod created
W1009 05:50:14.864] E1009 05:50:14.393795   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:14.865] I1009 05:50:14.480999   52917 horizontal.go:341] Horizontal Pod Autoscaler busybox0 has been deleted in namespace-1570600192-10006
W1009 05:50:14.865] I1009 05:50:14.488902   52917 horizontal.go:341] Horizontal Pod Autoscaler busybox1 has been deleted in namespace-1570600192-10006
W1009 05:50:14.865] E1009 05:50:14.544499   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:14.966] core.sh:1341: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1009 05:50:15.051] core.sh:1343: Successful get pods -n other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1009 05:50:15.175] Successful
I1009 05:50:15.175] message:error: a resource cannot be retrieved by name across all namespaces
I1009 05:50:15.175] has:a resource cannot be retrieved by name across all namespaces
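The checks above exercise namespace-scoped pod retrieval through kubectl's go-template output; a minimal sketch of equivalent commands, assuming a standard kubectl context (the exact core.sh invocations may differ), is:
kubectl create namespace other
kubectl get pods --namespace=other -o go-template --template='{{range.items}}{{.metadata.name}}:{{end}}'
kubectl get pods valid-pod -n other                 # retrieval by name within one namespace succeeds
kubectl get pods valid-pod --all-namespaces         # fails: a resource cannot be retrieved by name across all namespaces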
W1009 05:50:15.276] E1009 05:50:15.100491   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:15.277] E1009 05:50:15.255491   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:15.378] core.sh:1350: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1009 05:50:15.417] pod "valid-pod" force deleted
W1009 05:50:15.520] E1009 05:50:15.396415   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:15.521] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W1009 05:50:15.547] E1009 05:50:15.546554   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:15.648] core.sh:1354: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:50:15.667] namespace "other" deleted
W1009 05:50:16.103] E1009 05:50:16.102544   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
... 19 more identical reflector.go:153 errors elided (W1009 05:50:16.258 through W1009 05:50:20.558) ...
I1009 05:50:20.851] +++ exit code: 0
I1009 05:50:20.923] Recording: run_secrets_test
I1009 05:50:20.924] Running command: run_secrets_test
I1009 05:50:20.973] 
I1009 05:50:20.979] +++ Running case: test-cmd.run_secrets_test 
I1009 05:50:20.985] +++ working dir: /go/src/k8s.io/kubernetes
I1009 05:50:20.990] +++ command: run_secrets_test
I1009 05:50:21.011] +++ [1009 05:50:21] Creating namespace namespace-1570600221-28567
W1009 05:50:21.112] E1009 05:50:21.111521   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:21.213] namespace/namespace-1570600221-28567 created
I1009 05:50:21.252] Context "test" modified.
I1009 05:50:21.275] +++ [1009 05:50:21] Testing secrets
W1009 05:50:21.377] E1009 05:50:21.272083   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:21.395] I1009 05:50:21.393310   68920 loader.go:375] Config loaded from file:  /tmp/tmp.Mjy9Cwy53i/.kube/config
W1009 05:50:21.410] E1009 05:50:21.409672   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:21.511] Successful
I1009 05:50:21.512] message:apiVersion: v1
I1009 05:50:21.512] data:
I1009 05:50:21.512]   key1: dmFsdWUx
I1009 05:50:21.513] kind: Secret
I1009 05:50:21.513] metadata:
... skipping 26 lines ...
I1009 05:50:21.516] metadata:
I1009 05:50:21.516]   creationTimestamp: null
I1009 05:50:21.516]   name: test
I1009 05:50:21.517] has not:example.com
I1009 05:50:21.577] core.sh:725: Successful get namespaces {{range.items}}{{ if eq .metadata.name \"test-secrets\" }}found{{end}}{{end}}:: :
I1009 05:50:21.698] namespace/test-secrets created
W1009 05:50:21.799] E1009 05:50:21.559294   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:21.900] core.sh:729: Successful get namespaces/test-secrets {{.metadata.name}}: test-secrets
I1009 05:50:21.987] core.sh:733: Successful get secrets --namespace=test-secrets {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:50:22.103] secret/test-secret created
W1009 05:50:22.204] E1009 05:50:22.113811   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:22.275] E1009 05:50:22.274271   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:22.376] core.sh:737: Successful get secret/test-secret --namespace=test-secrets {{.metadata.name}}: test-secret
I1009 05:50:22.377] core.sh:738: Successful get secret/test-secret --namespace=test-secrets {{.type}}: test-type
I1009 05:50:22.606] secret "test-secret" deleted
W1009 05:50:22.707] E1009 05:50:22.411368   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:22.708] E1009 05:50:22.562422   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:22.808] core.sh:748: Successful get secrets --namespace=test-secrets {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:50:22.851] secret/test-secret created
I1009 05:50:22.989] core.sh:752: Successful get secret/test-secret --namespace=test-secrets {{.metadata.name}}: test-secret
I1009 05:50:23.122] core.sh:753: Successful get secret/test-secret --namespace=test-secrets {{.type}}: kubernetes.io/dockerconfigjson
I1009 05:50:23.352] secret "test-secret" deleted
W1009 05:50:23.453] E1009 05:50:23.115409   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:23.454] I1009 05:50:23.207442   52917 namespace_controller.go:185] Namespace has been deleted my-namespace
W1009 05:50:23.454] E1009 05:50:23.276490   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:23.455] E1009 05:50:23.413036   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:23.556] core.sh:763: Successful get secrets --namespace=test-secrets {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:50:23.607] secret/test-secret created
W1009 05:50:23.709] E1009 05:50:23.564992   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:23.810] core.sh:766: Successful get secret/test-secret --namespace=test-secrets {{.metadata.name}}: test-secret
I1009 05:50:23.873] core.sh:767: Successful get secret/test-secret --namespace=test-secrets {{.type}}: kubernetes.io/tls
I1009 05:50:23.985] secret "test-secret" deleted
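The three secret variants checked above (test-type, kubernetes.io/dockerconfigjson, kubernetes.io/tls) can be reproduced with standard kubectl commands roughly like the following; the literal values and file paths are placeholders, not taken from the log:
kubectl create secret generic test-secret -n test-secrets --type=test-type --from-literal=key1=value1
kubectl create secret docker-registry test-secret -n test-secrets --docker-username=u --docker-password=p --docker-email=u@example.com   # type kubernetes.io/dockerconfigjson
kubectl create secret tls test-secret -n test-secrets --cert=tls.crt --key=tls.key                                                        # type kubernetes.io/tls
kubectl get secret/test-secret -n test-secrets -o go-template --template='{{.type}}'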
W1009 05:50:24.086] I1009 05:50:23.833951   52917 namespace_controller.go:185] Namespace has been deleted kube-node-lease
W1009 05:50:24.087] I1009 05:50:23.872547   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600024-30424
W1009 05:50:24.087] I1009 05:50:23.878281   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600052-12785
... skipping 2 lines ...
W1009 05:50:24.089] I1009 05:50:23.886157   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600027-3425
W1009 05:50:24.089] I1009 05:50:23.894103   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600051-6038
W1009 05:50:24.089] I1009 05:50:23.899436   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600051-10162
W1009 05:50:24.089] I1009 05:50:23.902450   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600032-15029
W1009 05:50:24.090] I1009 05:50:23.921315   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600040-13940
W1009 05:50:24.090] I1009 05:50:24.074103   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600063-19919
W1009 05:50:24.118] E1009 05:50:24.117593   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:24.126] I1009 05:50:24.125663   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600086-2597
W1009 05:50:24.135] I1009 05:50:24.135002   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600083-12381
W1009 05:50:24.136] I1009 05:50:24.135493   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600082-29442
W1009 05:50:24.149] I1009 05:50:24.148239   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600092-2273
W1009 05:50:24.158] I1009 05:50:24.157431   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600065-28802
W1009 05:50:24.159] I1009 05:50:24.157508   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600093-6458
W1009 05:50:24.171] I1009 05:50:24.171125   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600089-19647
W1009 05:50:24.194] I1009 05:50:24.194115   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600087-13021
W1009 05:50:24.216] I1009 05:50:24.215163   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600097-360
W1009 05:50:24.279] E1009 05:50:24.278500   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:24.367] I1009 05:50:24.366948   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600101-31966
W1009 05:50:24.415] E1009 05:50:24.414964   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:24.437] I1009 05:50:24.436685   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600121-31616
W1009 05:50:24.438] I1009 05:50:24.437963   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600142-21464
W1009 05:50:24.440] I1009 05:50:24.440162   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600123-12411
W1009 05:50:24.461] I1009 05:50:24.460221   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600124-125
W1009 05:50:24.477] I1009 05:50:24.476815   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600148-13644
W1009 05:50:24.481] I1009 05:50:24.480850   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600134-4071
W1009 05:50:24.492] I1009 05:50:24.491151   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600148-6280
W1009 05:50:24.493] I1009 05:50:24.491151   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600102-21965
W1009 05:50:24.513] I1009 05:50:24.512741   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600143-2928
W1009 05:50:24.568] E1009 05:50:24.567496   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:24.613] I1009 05:50:24.612349   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600149-6860
W1009 05:50:24.647] I1009 05:50:24.646845   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600154-22137
W1009 05:50:24.658] I1009 05:50:24.658055   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600152-16349
W1009 05:50:24.680] I1009 05:50:24.679860   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600191-3290
W1009 05:50:24.728] I1009 05:50:24.727642   52917 namespace_controller.go:185] Namespace has been deleted namespace-1570600192-10006
I1009 05:50:24.829] secret/test-secret created
... skipping 2 lines ...
I1009 05:50:24.830] secret "test-secret" deleted
I1009 05:50:24.830] secret/secret-string-data created
I1009 05:50:24.916] core.sh:796: Successful get secret/secret-string-data --namespace=test-secrets  {{.data}}: map[k1:djE= k2:djI=]
I1009 05:50:25.062] core.sh:797: Successful get secret/secret-string-data --namespace=test-secrets  {{.data}}: map[k1:djE= k2:djI=]
I1009 05:50:25.209] core.sh:798: Successful get secret/secret-string-data --namespace=test-secrets  {{.stringData}}: <no value>
I1009 05:50:25.327] secret "secret-string-data" deleted
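The core.sh:796-798 checks above rely on the API server folding stringData into base64-encoded data on write ("v1" becomes djE=, "v2" becomes djI=) and dropping stringData from the stored object; a sketch of an equivalent manifest (the creation method is assumed, not shown in the log) is:
cat <<EOF | kubectl create -n test-secrets -f -
apiVersion: v1
kind: Secret
metadata:
  name: secret-string-data
stringData:
  k1: v1
  k2: v2
EOF
kubectl get secret/secret-string-data -n test-secrets -o go-template --template='{{.data}}'        # map[k1:djE= k2:djI=]
kubectl get secret/secret-string-data -n test-secrets -o go-template --template='{{.stringData}}'  # <no value>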
W1009 05:50:25.428] E1009 05:50:25.120006   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:25.429] E1009 05:50:25.280660   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:25.429] E1009 05:50:25.417008   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:25.530] core.sh:807: Successful get secrets --namespace=test-secrets {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:50:25.715] secret "test-secret" deleted
W1009 05:50:25.816] E1009 05:50:25.569174   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:25.817] I1009 05:50:25.795517   52917 namespace_controller.go:185] Namespace has been deleted other
I1009 05:50:25.917] namespace "test-secrets" deleted
W1009 05:50:26.122] E1009 05:50:26.121743   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
... 19 more identical reflector.go:153 errors elided (W1009 05:50:26.284 through W1009 05:50:30.580) ...
I1009 05:50:30.996] +++ exit code: 0
I1009 05:50:31.056] Recording: run_configmap_tests
I1009 05:50:31.056] Running command: run_configmap_tests
I1009 05:50:31.092] 
I1009 05:50:31.097] +++ Running case: test-cmd.run_configmap_tests 
I1009 05:50:31.100] +++ working dir: /go/src/k8s.io/kubernetes
I1009 05:50:31.105] +++ command: run_configmap_tests
I1009 05:50:31.121] +++ [1009 05:50:31] Creating namespace namespace-1570600231-30689
I1009 05:50:31.218] namespace/namespace-1570600231-30689 created
W1009 05:50:31.319] E1009 05:50:31.134916   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:31.320] E1009 05:50:31.293744   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:31.420] Context "test" modified.
I1009 05:50:31.421] +++ [1009 05:50:31] Testing configmaps
W1009 05:50:31.522] E1009 05:50:31.428665   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:31.582] E1009 05:50:31.581241   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:31.683] configmap/test-configmap created
I1009 05:50:31.737] core.sh:28: Successful get configmap/test-configmap {{.metadata.name}}: test-configmap
I1009 05:50:31.842] configmap "test-configmap" deleted
I1009 05:50:32.016] core.sh:33: Successful get namespaces {{range.items}}{{ if eq .metadata.name \"test-configmaps\" }}found{{end}}{{end}}:: :
I1009 05:50:32.124] namespace/test-configmaps created
W1009 05:50:32.225] E1009 05:50:32.136964   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:32.296] E1009 05:50:32.295842   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:32.397] core.sh:37: Successful get namespaces/test-configmaps {{.metadata.name}}: test-configmaps
I1009 05:50:32.404] core.sh:41: Successful get configmaps {{range.items}}{{ if eq .metadata.name \"test-configmap\" }}found{{end}}{{end}}:: :
I1009 05:50:32.537] core.sh:42: Successful get configmaps {{range.items}}{{ if eq .metadata.name \"test-binary-configmap\" }}found{{end}}{{end}}:: :
I1009 05:50:32.638] configmap/test-configmap created
W1009 05:50:32.740] E1009 05:50:32.430508   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:32.741] E1009 05:50:32.583180   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:32.842] configmap/test-binary-configmap created
I1009 05:50:32.894] core.sh:48: Successful get configmap/test-configmap --namespace=test-configmaps {{.metadata.name}}: test-configmap
I1009 05:50:33.043] (Bcore.sh:49: Successful get configmap/test-binary-configmap --namespace=test-configmaps {{.metadata.name}}: test-binary-configmap
I1009 05:50:33.395] configmap "test-configmap" deleted
W1009 05:50:33.496] E1009 05:50:33.142209   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:33.496] E1009 05:50:33.298795   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:33.497] E1009 05:50:33.432550   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:33.585] E1009 05:50:33.585017   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:33.686] configmap "test-binary-configmap" deleted
I1009 05:50:33.687] namespace "test-configmaps" deleted
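The configmap case above can be approximated with standard kubectl commands; the data sources (--from-literal and --from-file) are assumptions, since the actual manifests are not shown in the log:
kubectl create namespace test-configmaps
kubectl create configmap test-configmap -n test-configmaps --from-literal=key1=value1
kubectl create configmap test-binary-configmap -n test-configmaps --from-file=key=./some-binary-file
kubectl get configmap/test-configmap -n test-configmaps -o go-template --template='{{.metadata.name}}'
kubectl delete namespace test-configmaps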
W1009 05:50:34.145] E1009 05:50:34.144819   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:34.301] E1009 05:50:34.300721   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:34.435] E1009 05:50:34.434332   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:34.587] E1009 05:50:34.587087   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:35.147] E1009 05:50:35.146334   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:35.303] E1009 05:50:35.302679   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:35.437] E1009 05:50:35.436339   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:35.589] E1009 05:50:35.588781   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:35.956] I1009 05:50:35.955401   52917 namespace_controller.go:185] Namespace has been deleted test-secrets
W1009 05:50:36.149] E1009 05:50:36.148319   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:36.305] E1009 05:50:36.304586   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:36.439] E1009 05:50:36.438447   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:36.592] E1009 05:50:36.591132   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:37.151] E1009 05:50:37.150077   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:37.307] E1009 05:50:37.306512   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:37.441] E1009 05:50:37.440667   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:37.593] E1009 05:50:37.592969   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:38.153] E1009 05:50:38.152568   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:38.309] E1009 05:50:38.308563   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:38.443] E1009 05:50:38.442647   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:38.597] E1009 05:50:38.596103   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:38.797] +++ exit code: 0
I1009 05:50:38.854] Recording: run_client_config_tests
I1009 05:50:38.855] Running command: run_client_config_tests
I1009 05:50:38.894] 
I1009 05:50:38.898] +++ Running case: test-cmd.run_client_config_tests 
I1009 05:50:38.902] +++ working dir: /go/src/k8s.io/kubernetes
I1009 05:50:38.906] +++ command: run_client_config_tests
I1009 05:50:38.925] +++ [1009 05:50:38] Creating namespace namespace-1570600238-24952
I1009 05:50:39.037] namespace/namespace-1570600238-24952 created
I1009 05:50:39.146] Context "test" modified.
I1009 05:50:39.165] +++ [1009 05:50:39] Testing client config
W1009 05:50:39.267] E1009 05:50:39.155095   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:39.311] E1009 05:50:39.310544   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:39.412] Successful
I1009 05:50:39.412] message:error: stat missing: no such file or directory
I1009 05:50:39.413] has:missing: no such file or directory
I1009 05:50:39.413] Successful
I1009 05:50:39.413] message:error: stat missing: no such file or directory
I1009 05:50:39.413] has:missing: no such file or directory
W1009 05:50:39.514] E1009 05:50:39.444892   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:39.599] E1009 05:50:39.598490   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:39.700] Successful
I1009 05:50:39.701] message:error: stat missing: no such file or directory
I1009 05:50:39.701] has:missing: no such file or directory
I1009 05:50:39.701] Successful
I1009 05:50:39.701] message:Error in configuration: context was not found for specified context: missing-context
I1009 05:50:39.702] has:context was not found for specified context: missing-context
I1009 05:50:39.767] Successful
I1009 05:50:39.768] message:error: no server found for cluster "missing-cluster"
I1009 05:50:39.768] has:no server found for cluster "missing-cluster"
I1009 05:50:39.892] Successful
I1009 05:50:39.893] message:error: auth info "missing-user" does not exist
I1009 05:50:39.893] has:auth info "missing-user" does not exist
I1009 05:50:40.130] Successful
I1009 05:50:40.131] message:error: error loading config file "/tmp/newconfig.yaml": no kind "Config" is registered for version "v-1" in scheme "k8s.io/client-go/tools/clientcmd/api/latest/latest.go:50"
I1009 05:50:40.131] has:error loading config file
W1009 05:50:40.232] E1009 05:50:40.157914   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:40.314] E1009 05:50:40.313033   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:40.414] Successful
I1009 05:50:40.415] message:error: stat missing-config: no such file or directory
I1009 05:50:40.415] has:no such file or directory
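Each failure message above corresponds to a standard kubectl client-config flag pointing at something that does not exist; roughly:
kubectl get pods --kubeconfig=missing              # error: stat missing: no such file or directory
kubectl get pods --context=missing-context         # context was not found for specified context: missing-context
kubectl get pods --cluster=missing-cluster         # no server found for cluster "missing-cluster"
kubectl get pods --user=missing-user               # auth info "missing-user" does not exist
kubectl get pods --kubeconfig=/tmp/newconfig.yaml  # error loading config file if it declares an unknown Config version (here "v-1")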
I1009 05:50:40.415] +++ exit code: 0
I1009 05:50:40.416] Recording: run_service_accounts_tests
I1009 05:50:40.416] Running command: run_service_accounts_tests
I1009 05:50:40.416] 
I1009 05:50:40.416] +++ Running case: test-cmd.run_service_accounts_tests 
I1009 05:50:40.416] +++ working dir: /go/src/k8s.io/kubernetes
I1009 05:50:40.416] +++ command: run_service_accounts_tests
I1009 05:50:40.417] +++ [1009 05:50:40] Creating namespace namespace-1570600240-16272
I1009 05:50:40.507] namespace/namespace-1570600240-16272 created
W1009 05:50:40.609] E1009 05:50:40.446809   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:40.610] E1009 05:50:40.602576   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:40.710] Context "test" modified.
I1009 05:50:40.711] +++ [1009 05:50:40] Testing service accounts
I1009 05:50:40.804] core.sh:828: Successful get namespaces {{range.items}}{{ if eq .metadata.name \"test-service-accounts\" }}found{{end}}{{end}}:: :
I1009 05:50:40.908] namespace/test-service-accounts created
I1009 05:50:41.060] core.sh:832: Successful get namespaces/test-service-accounts {{.metadata.name}}: test-service-accounts
I1009 05:50:41.163] serviceaccount/test-service-account created
W1009 05:50:41.264] E1009 05:50:41.159439   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:41.315] E1009 05:50:41.314554   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:41.416] core.sh:838: Successful get serviceaccount/test-service-account --namespace=test-service-accounts {{.metadata.name}}: test-service-account
I1009 05:50:41.421] serviceaccount "test-service-account" deleted
W1009 05:50:41.522] E1009 05:50:41.448627   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:41.605] E1009 05:50:41.605043   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:41.706] namespace "test-service-accounts" deleted
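A minimal sketch of the service-account case above, using standard kubectl commands (flags assumed, not taken verbatim from core.sh):
kubectl create namespace test-service-accounts
kubectl create serviceaccount test-service-account -n test-service-accounts
kubectl get serviceaccount/test-service-account -n test-service-accounts -o go-template --template='{{.metadata.name}}'
kubectl delete serviceaccount test-service-account -n test-service-accounts
kubectl delete namespace test-service-accounts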
W1009 05:50:42.162] E1009 05:50:42.161512   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:42.317] E1009 05:50:42.316546   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:42.452] E1009 05:50:42.451277   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:42.608] E1009 05:50:42.607265   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:43.164] E1009 05:50:43.163435   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:43.319] E1009 05:50:43.318617   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:43.454] E1009 05:50:43.453251   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:43.610] E1009 05:50:43.609379   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:43.753] I1009 05:50:43.752663   52917 namespace_controller.go:185] Namespace has been deleted test-configmaps
W1009 05:50:44.166] E1009 05:50:44.165632   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:44.321] E1009 05:50:44.320528   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:44.456] E1009 05:50:44.455175   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:44.612] E1009 05:50:44.611602   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:45.168] E1009 05:50:45.167357   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:45.323] E1009 05:50:45.322331   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:45.457] E1009 05:50:45.457121   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:45.613] E1009 05:50:45.613017   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:46.170] E1009 05:50:46.169617   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:46.326] E1009 05:50:46.325533   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:46.460] E1009 05:50:46.459342   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:46.615] E1009 05:50:46.614619   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:46.716] +++ exit code: 0
I1009 05:50:46.775] Recording: run_job_tests
I1009 05:50:46.776] Running command: run_job_tests
I1009 05:50:46.817] 
I1009 05:50:46.820] +++ Running case: test-cmd.run_job_tests 
I1009 05:50:46.826] +++ working dir: /go/src/k8s.io/kubernetes
I1009 05:50:46.830] +++ command: run_job_tests
I1009 05:50:46.849] +++ [1009 05:50:46] Creating namespace namespace-1570600246-21539
I1009 05:50:46.948] namespace/namespace-1570600246-21539 created
I1009 05:50:47.051] Context "test" modified.
I1009 05:50:47.067] +++ [1009 05:50:47] Testing job
W1009 05:50:47.173] E1009 05:50:47.172392   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:47.275] batch.sh:30: Successful get namespaces {{range.items}}{{ if eq .metadata.name \"test-jobs\" }}found{{end}}{{end}}:: :
I1009 05:50:47.305] namespace/test-jobs created
W1009 05:50:47.406] E1009 05:50:47.327869   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:47.461] E1009 05:50:47.461066   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:47.534] kubectl run --generator=cronjob/v1beta1 is DEPRECATED and will be removed in a future version. Use kubectl run --generator=run-pod/v1 or kubectl create instead.
W1009 05:50:47.617] E1009 05:50:47.616883   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:47.719] batch.sh:34: Successful get namespaces/test-jobs {{.metadata.name}}: test-jobs
I1009 05:50:47.719] cronjob.batch/pi created
I1009 05:50:47.720] batch.sh:39: Successful get cronjob/pi --namespace=test-jobs {{.metadata.name}}: pi
I1009 05:50:47.791] NAME   SCHEDULE       SUSPEND   ACTIVE   LAST SCHEDULE   AGE
I1009 05:50:47.792] pi     59 23 31 2 *   False     0        <none>          0s
I1009 05:50:47.916] Name:                          pi
I1009 05:50:47.917] Namespace:                     test-jobs
I1009 05:50:47.918] Labels:                        run=pi
I1009 05:50:47.918] Annotations:                   <none>
I1009 05:50:47.919] Schedule:                      59 23 31 2 *
I1009 05:50:47.919] Concurrency Policy:            Allow
I1009 05:50:47.919] Suspend:                       False
I1009 05:50:47.920] Successful Job History Limit:  3
I1009 05:50:47.920] Failed Job History Limit:      1
I1009 05:50:47.920] Starting Deadline Seconds:     <unset>
I1009 05:50:47.920] Selector:                      <unset>
I1009 05:50:47.920] Parallelism:                   <unset>
I1009 05:50:47.920] Completions:                   <unset>
I1009 05:50:47.920] Pod Template:
I1009 05:50:47.920]   Labels:  run=pi
... skipping 16 lines ...
I1009 05:50:47.923] Last Schedule Time:  <unset>
I1009 05:50:47.923] Active Jobs:         <none>
I1009 05:50:47.923] Events:              <none>
I1009 05:50:48.056] Successful
I1009 05:50:48.057] message:job.batch/test-job
I1009 05:50:48.057] has:job.batch/test-job
W1009 05:50:48.175] E1009 05:50:48.174758   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:48.276] batch.sh:48: Successful get jobs {{range.items}}{{.metadata.name}}{{end}}: 
I1009 05:50:48.337] job.batch/test-job created
W1009 05:50:48.438] I1009 05:50:48.326650   52917 event.go:262] Event(v1.ObjectReference{Kind:"Job", Namespace:"test-jobs", Name:"test-job", UID:"4ee8017a-08af-4332-83cf-c7cc15f9f2bc", APIVersion:"batch/v1", ResourceVersion:"1427", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test-job-tn942
W1009 05:50:48.439] E1009 05:50:48.332096   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:48.463] E1009 05:50:48.462841   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:48.564] batch.sh:53: Successful get job/test-job --namespace=test-jobs {{.metadata.name}}: test-job
I1009 05:50:48.573] NAME       COMPLETIONS   DURATION   AGE
I1009 05:50:48.573] test-job   0/1           0s         0s
W1009 05:50:48.674] E1009 05:50:48.618479   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:48.775] Name:           test-job
I1009 05:50:48.775] Namespace:      test-jobs
I1009 05:50:48.775] Selector:       controller-uid=4ee8017a-08af-4332-83cf-c7cc15f9f2bc
I1009 05:50:48.776] Labels:         controller-uid=4ee8017a-08af-4332-83cf-c7cc15f9f2bc
I1009 05:50:48.776]                 job-name=test-job
I1009 05:50:48.776]                 run=pi
I1009 05:50:48.776] Annotations:    cronjob.kubernetes.io/instantiate: manual
I1009 05:50:48.776] Controlled By:  CronJob/pi
I1009 05:50:48.776] Parallelism:    1
I1009 05:50:48.777] Completions:    1
I1009 05:50:48.777] Start Time:     Wed, 09 Oct 2019 05:50:48 +0000
I1009 05:50:48.777] Pods Statuses:  1 Running / 0 Succeeded / 0 Failed
I1009 05:50:48.777] Pod Template:
I1009 05:50:48.777]   Labels:  controller-uid=4ee8017a-08af-4332-83cf-c7cc15f9f2bc
I1009 05:50:48.777]            job-name=test-job
I1009 05:50:48.778]            run=pi
I1009 05:50:48.778]   Containers:
I1009 05:50:48.778]    pi:
... skipping 15 lines ...
I1009 05:50:48.780]   Type    Reason            Age   From            Message
I1009 05:50:48.780]   ----    ------            ----  ----            -------
I1009 05:50:48.781]   Normal  SuccessfulCreate  0s    job-controller  Created pod: test-job-tn942
I1009 05:50:48.823] job.batch "test-job" deleted
I1009 05:50:48.946] cronjob.batch "pi" deleted
I1009 05:50:49.074] namespace "test-jobs" deleted
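The job case above creates the CronJob through the deprecated cronjob/v1beta1 run generator (see the warning earlier in this section) and then instantiates a Job from it, which is what produces the cronjob.kubernetes.io/instantiate: manual annotation; an approximate reproduction (the pi command line is assumed, only the label run=pi and the schedule appear in the log) is:
kubectl run pi -n test-jobs --generator=cronjob/v1beta1 --schedule="59 23 31 2 *" \
  --image=k8s.gcr.io/perl --restart=OnFailure -- perl -Mbignum=bpi -wle 'print bpi(10)'
kubectl create job test-job -n test-jobs --from=cronjob/pi
kubectl describe cronjob pi -n test-jobs
kubectl delete job test-job -n test-jobs && kubectl delete cronjob pi -n test-jobs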
W1009 05:50:49.177] E1009 05:50:49.176652   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:49.335] E1009 05:50:49.334545   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:49.467] E1009 05:50:49.466315   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:49.621] E1009 05:50:49.620403   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:50.179] E1009 05:50:50.178545   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:50.337] E1009 05:50:50.336341   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:50.469] E1009 05:50:50.468518   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:50.622] E1009 05:50:50.622018   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:51.181] E1009 05:50:51.180518   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:51.338] E1009 05:50:51.337655   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:51.471] E1009 05:50:51.470554   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:51.624] E1009 05:50:51.623635   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:51.665] I1009 05:50:51.664648   52917 namespace_controller.go:185] Namespace has been deleted test-service-accounts
W1009 05:50:52.183] E1009 05:50:52.182465   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:52.340] E1009 05:50:52.339957   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:52.473] E1009 05:50:52.472390   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:52.625] E1009 05:50:52.625167   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:53.184] E1009 05:50:53.184216   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:53.342] E1009 05:50:53.341806   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:53.475] E1009 05:50:53.474592   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:53.628] E1009 05:50:53.627257   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:54.186] E1009 05:50:54.185774   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:54.288] +++ exit code: 0
I1009 05:50:54.339] Recording: run_create_job_tests
I1009 05:50:54.340] Running command: run_create_job_tests
I1009 05:50:54.386] 
I1009 05:50:54.391] +++ Running case: test-cmd.run_create_job_tests 
I1009 05:50:54.396] +++ working dir: /go/src/k8s.io/kubernetes
I1009 05:50:54.400] +++ command: run_create_job_tests
I1009 05:50:54.416] +++ [1009 05:50:54] Creating namespace namespace-1570600254-22954
I1009 05:50:54.516] namespace/namespace-1570600254-22954 created
W1009 05:50:54.617] E1009 05:50:54.344021   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:54.618] E1009 05:50:54.476673   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:54.629] E1009 05:50:54.629108   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:54.731] Context "test" modified.
I1009 05:50:54.737] job.batch/test-job created
W1009 05:50:54.838] I1009 05:50:54.733656   52917 event.go:262] Event(v1.ObjectReference{Kind:"Job", Namespace:"namespace-1570600254-22954", Name:"test-job", UID:"bf7285a2-120e-44db-b862-9672a011dc0a", APIVersion:"batch/v1", ResourceVersion:"1445", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test-job-d7nsc
I1009 05:50:54.939] create.sh:86: Successful get job test-job {{(index .spec.template.spec.containers 0).image}}: k8s.gcr.io/nginx:test-cmd
I1009 05:50:55.004] job.batch "test-job" deleted
W1009 05:50:55.122] I1009 05:50:55.121632   52917 event.go:262] Event(v1.ObjectReference{Kind:"Job", Namespace:"namespace-1570600254-22954", Name:"test-job-pi", UID:"72f3edb5-d9f2-4d62-8341-5c00eca426ab", APIVersion:"batch/v1", ResourceVersion:"1453", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test-job-pi-q72fl
W1009 05:50:55.191] E1009 05:50:55.190164   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:55.292] job.batch/test-job-pi created
I1009 05:50:55.293] create.sh:92: Successful get job test-job-pi {{(index .spec.template.spec.containers 0).image}}: k8s.gcr.io/perl
I1009 05:50:55.394] job.batch "test-job-pi" deleted
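The create.sh:86 and create.sh:92 assertions above read a field back with a Go template; reconstructed outside the captured log, the underlying query is roughly of this shape (object names taken from the log):

  # Print the image of the first container in the job's pod template.
  kubectl get job test-job \
    -o go-template='{{(index .spec.template.spec.containers 0).image}}'
  # expected: k8s.gcr.io/nginx:test-cmd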
W1009 05:50:55.495] E1009 05:50:55.345747   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:55.497] E1009 05:50:55.479116   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:55.510] kubectl run --generator=cronjob/v1beta1 is DEPRECATED and will be removed in a future version. Use kubectl run --generator=run-pod/v1 or kubectl create instead.
I1009 05:50:55.610] cronjob.batch/test-pi created
I1009 05:50:55.676] job.batch/my-pi created
W1009 05:50:55.776] E1009 05:50:55.631047   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:55.777] I1009 05:50:55.673981   52917 event.go:262] Event(v1.ObjectReference{Kind:"Job", Namespace:"namespace-1570600254-22954", Name:"my-pi", UID:"9a759d39-8fbd-40ff-a7a2-eb4a622570fe", APIVersion:"batch/v1", ResourceVersion:"1462", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: my-pi-6qpzp
I1009 05:50:55.878] Successful
I1009 05:50:55.879] message:[perl -Mbignum=bpi -wle print bpi(10)]
I1009 05:50:55.880] has:perl -Mbignum=bpi -wle print bpi(10)
I1009 05:50:55.913] job.batch "my-pi" deleted
I1009 05:50:56.057] cronjob.batch "test-pi" deleted
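The test-pi/my-pi sequence above creates a one-off job from a cronjob's template; the log shows the deprecated kubectl run --generator=cronjob form was used for the cronjob. A hedged sketch of the modern equivalent, with the schedule value assumed:

  # Create a cronjob, then instantiate a job from its template.
  kubectl create cronjob test-pi --image=k8s.gcr.io/perl \
    --schedule="59 23 31 2 *" -- perl -Mbignum=bpi -wle 'print bpi(10)'
  kubectl create job my-pi --from=cronjob/test-pi
  # Verify the command was copied from the cronjob template.
  kubectl get job my-pi \
    -o jsonpath='{.spec.template.spec.containers[0].command}'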
... skipping 2 lines ...
I1009 05:50:56.165] Running command: run_pod_templates_tests
I1009 05:50:56.210] 
I1009 05:50:56.215] +++ Running case: test-cmd.run_pod_templates_tests 
I1009 05:50:56.222] +++ working dir: /go/src/k8s.io/kubernetes
I1009 05:50:56.228] +++ command: run_pod_templates_tests
I1009 05:50:56.245] +++ [1009 05:50:56] Creating namespace namespace-1570600256-20513
W1009 05:50:56.346] E1009 05:50:56.191937   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:56.348] E1009 05:50:56.347331   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:56.448] namespace/namespace-1570600256-20513 created
I1009 05:50:56.455] Context "test" modified.
I1009 05:50:56.467] +++ [1009 05:50:56] Testing pod templates
W1009 05:50:56.569] E1009 05:50:56.481475   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:56.634] E1009 05:50:56.633287   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:56.735] core.sh:1415: Successful get podtemplates {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:50:56.835] podtemplate/nginx created
W1009 05:50:56.936] I1009 05:50:56.831862   49360 controller.go:606] quota admission added evaluator for: podtemplates
I1009 05:50:57.038] core.sh:1419: Successful get podtemplates {{range.items}}{{.metadata.name}}:{{end}}: nginx:
I1009 05:50:57.088] NAME    CONTAINERS   IMAGES   POD LABELS
I1009 05:50:57.089] nginx   nginx        nginx    name=nginx
W1009 05:50:57.194] E1009 05:50:57.194015   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:57.350] E1009 05:50:57.349508   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:57.451] core.sh:1427: Successful get podtemplates {{range.items}}{{.metadata.name}}:{{end}}: nginx:
I1009 05:50:57.473] podtemplate "nginx" deleted
W1009 05:50:57.573] E1009 05:50:57.483332   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:57.636] E1009 05:50:57.635254   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:57.737] core.sh:1431: Successful get podtemplate {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:50:57.737] +++ exit code: 0
I1009 05:50:57.738] Recording: run_service_tests
I1009 05:50:57.738] Running command: run_service_tests
I1009 05:50:57.745] 
I1009 05:50:57.749] +++ Running case: test-cmd.run_service_tests 
I1009 05:50:57.753] +++ working dir: /go/src/k8s.io/kubernetes
I1009 05:50:57.758] +++ command: run_service_tests
I1009 05:50:57.859] Context "test" modified.
I1009 05:50:57.873] +++ [1009 05:50:57] Testing kubectl(v1:services)
I1009 05:50:58.014] core.sh:858: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I1009 05:50:58.285] service/redis-master created
W1009 05:50:58.386] E1009 05:50:58.195952   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:58.387] E1009 05:50:58.351815   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:58.486] E1009 05:50:58.485371   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:58.587] core.sh:862: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:
I1009 05:50:58.593] core.sh:864: FAIL!
I1009 05:50:58.594] Describe services redis-master
I1009 05:50:58.594]   Expected Match: Name:
I1009 05:50:58.594]   Not found in:
I1009 05:50:58.594] Name:              redis-master
I1009 05:50:58.594] Namespace:         default
I1009 05:50:58.594] Labels:            app=redis
... skipping 8 lines ...
I1009 05:50:58.595] Endpoints:         <none>
I1009 05:50:58.595] Session Affinity:  None
I1009 05:50:58.595] Events:            <none>
I1009 05:50:58.596] 864 /go/src/k8s.io/kubernetes/test/cmd/../../test/cmd/core.sh
W1009 05:50:58.696] E1009 05:50:58.637860   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:50:58.798] core.sh:866: Successful describe
I1009 05:50:58.798] Name:              redis-master
I1009 05:50:58.798] Namespace:         default
I1009 05:50:58.798] Labels:            app=redis
I1009 05:50:58.798]                    role=master
I1009 05:50:58.799]                    tier=backend
... skipping 35 lines ...
I1009 05:50:59.050] Port:              <unset>  6379/TCP
I1009 05:50:59.050] TargetPort:        6379/TCP
I1009 05:50:59.050] Endpoints:         <none>
I1009 05:50:59.050] Session Affinity:  None
I1009 05:50:59.050] Events:            <none>
W1009 05:50:59.198] E1009 05:50:59.197460   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:50:59.219] I1009 05:50:59.218924   52917 namespace_controller.go:185] Namespace has been deleted test-jobs
I1009 05:50:59.320] 
I1009 05:50:59.320] FAIL!
I1009 05:50:59.321] Describe services
I1009 05:50:59.321]   Expected Match: Name:
I1009 05:50:59.321]   Not found in:
I1009 05:50:59.321] Name:              kubernetes
I1009 05:50:59.321] Namespace:         default
I1009 05:50:59.321] Labels:            component=apiserver
... skipping 129 lines ...
I1009 05:50:59.957]   - port: 6379
I1009 05:50:59.957]     targetPort: 6379
I1009 05:50:59.957]   selector:
I1009 05:50:59.957]     role: padawan
I1009 05:50:59.957] status:
I1009 05:50:59.957]   loadBalancer: {}
W1009 05:51:00.058] E1009 05:50:59.353960   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:00.058] E1009 05:50:59.487299   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:00.059] E1009 05:50:59.640503   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:00.159] apiVersion: v1
I1009 05:51:00.159] kind: Service
I1009 05:51:00.160] metadata:
I1009 05:51:00.160]   creationTimestamp: "2019-10-09T05:50:58Z"
I1009 05:51:00.160]   labels:
I1009 05:51:00.160]     app: redis
... skipping 14 lines ...
I1009 05:51:00.161]     role: padawan
I1009 05:51:00.161]   sessionAffinity: None
I1009 05:51:00.161]   type: ClusterIP
I1009 05:51:00.161] status:
I1009 05:51:00.161]   loadBalancer: {}
I1009 05:51:00.218] service/redis-master selector updated
W1009 05:51:00.319] E1009 05:51:00.199706   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:00.357] E1009 05:51:00.356863   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:00.458] core.sh:890: Successful get services redis-master {{range.spec.selector}}{{.}}:{{end}}: padawan:
I1009 05:51:00.490] service/redis-master selector updated
W1009 05:51:00.592] E1009 05:51:00.489134   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:00.648] E1009 05:51:00.647866   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:00.749] core.sh:894: Successful get services redis-master {{range.spec.selector}}{{.}}:{{end}}: redis:master:backend:
I1009 05:51:00.790] apiVersion: v1
I1009 05:51:00.791] kind: Service
I1009 05:51:00.791] metadata:
I1009 05:51:00.791]   creationTimestamp: "2019-10-09T05:50:58Z"
I1009 05:51:00.791]   labels:
... skipping 14 lines ...
I1009 05:51:00.793]   selector:
I1009 05:51:00.793]     role: padawan
I1009 05:51:00.793]   sessionAffinity: None
I1009 05:51:00.793]   type: ClusterIP
I1009 05:51:00.793] status:
I1009 05:51:00.793]   loadBalancer: {}
W1009 05:51:00.894] error: you must specify resources by --filename when --local is set.
W1009 05:51:00.894] Example resource specifications include:
W1009 05:51:00.894]    '-f rsrc.yaml'
W1009 05:51:00.895]    '--filename=rsrc.json'
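The error above is kubectl refusing a --local mutation without an input manifest. A sketch of a form that satisfies the check (file name hypothetical):

  # --local edits the supplied manifest client-side instead of the live object,
  # so a file (or stdin) must be provided with -f.
  kubectl set selector -f redis-master-service.yaml role=padawan --local -o yaml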
I1009 05:51:01.036] core.sh:898: Successful get services redis-master {{range.spec.selector}}{{.}}:{{end}}: redis:master:backend:
I1009 05:51:01.288] core.sh:905: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:
I1009 05:51:01.399] service "redis-master" deleted
W1009 05:51:01.500] E1009 05:51:01.202645   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:01.501] E1009 05:51:01.359831   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:01.501] E1009 05:51:01.491221   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:01.602] core.sh:912: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I1009 05:51:01.684] core.sh:916: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I1009 05:51:01.942] service/redis-master created
W1009 05:51:02.043] E1009 05:51:01.651007   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:02.145] core.sh:920: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:
I1009 05:51:02.236] core.sh:924: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:
I1009 05:51:02.480] service/service-v1-test created
W1009 05:51:02.581] E1009 05:51:02.205339   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:02.581] E1009 05:51:02.362990   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:02.582] E1009 05:51:02.492845   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:02.653] E1009 05:51:02.652988   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:02.755] core.sh:945: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:service-v1-test:
I1009 05:51:02.875] service/service-v1-test replaced
I1009 05:51:03.024] core.sh:952: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:service-v1-test:
I1009 05:51:03.137] service "redis-master" deleted
W1009 05:51:03.238] E1009 05:51:03.207697   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:03.339] service "service-v1-test" deleted
I1009 05:51:03.393] core.sh:960: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I1009 05:51:03.520] core.sh:964: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I1009 05:51:03.760] service/redis-master created
W1009 05:51:03.861] E1009 05:51:03.364870   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:03.862] E1009 05:51:03.494388   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:03.863] E1009 05:51:03.655214   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:04.022] service/redis-slave created
I1009 05:51:04.167] core.sh:969: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:redis-slave:
I1009 05:51:04.288] Successful
I1009 05:51:04.289] message:NAME           RSRC
I1009 05:51:04.289] kubernetes     145
I1009 05:51:04.289] redis-master   1498
I1009 05:51:04.289] redis-slave    1501
I1009 05:51:04.289] has:redis-master
W1009 05:51:04.390] E1009 05:51:04.209610   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:04.391] E1009 05:51:04.367645   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:04.492] core.sh:979: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:redis-slave:
I1009 05:51:04.547] service "redis-master" deleted
I1009 05:51:04.555] service "redis-slave" deleted
W1009 05:51:04.657] E1009 05:51:04.496448   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:04.658] E1009 05:51:04.657697   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:04.759] core.sh:986: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I1009 05:51:04.839] core.sh:990: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I1009 05:51:04.935] service/beep-boop created
I1009 05:51:05.073] core.sh:994: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: beep-boop:kubernetes:
I1009 05:51:05.208] core.sh:998: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: beep-boop:kubernetes:
I1009 05:51:05.325] service "beep-boop" deleted
W1009 05:51:05.426] E1009 05:51:05.211288   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:05.426] E1009 05:51:05.370562   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:05.498] E1009 05:51:05.498193   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:05.599] core.sh:1005: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
I1009 05:51:05.600] core.sh:1009: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:51:05.729] service/testmetadata created
I1009 05:51:05.730] deployment.apps/testmetadata created
W1009 05:51:05.831] E1009 05:51:05.659638   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:05.831] kubectl run --generator=deployment/apps.v1 is DEPRECATED and will be removed in a future version. Use kubectl run --generator=run-pod/v1 or kubectl create instead.
W1009 05:51:05.832] I1009 05:51:05.710908   52917 event.go:262] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"default", Name:"testmetadata", UID:"704b27fd-33c6-41ce-aee1-fde9b99e5a23", APIVersion:"apps/v1", ResourceVersion:"1517", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set testmetadata-bd968f46 to 2
W1009 05:51:05.832] I1009 05:51:05.718300   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"default", Name:"testmetadata-bd968f46", UID:"b9c38151-6d9f-4549-bc0f-db4855116858", APIVersion:"apps/v1", ResourceVersion:"1518", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: testmetadata-bd968f46-vqh5h
W1009 05:51:05.832] I1009 05:51:05.722084   52917 event.go:262] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"default", Name:"testmetadata-bd968f46", UID:"b9c38151-6d9f-4549-bc0f-db4855116858", APIVersion:"apps/v1", ResourceVersion:"1518", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: testmetadata-bd968f46-p49lt
I1009 05:51:05.933] core.sh:1013: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: testmetadata:
I1009 05:51:06.000] core.sh:1014: Successful get service testmetadata {{.metadata.annotations}}: map[zone-context:home]
I1009 05:51:06.145] service/exposemetadata exposed
W1009 05:51:06.246] E1009 05:51:06.213010   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:06.347] core.sh:1020: Successful get service exposemetadata {{.metadata.annotations}}: map[zone-context:work]
I1009 05:51:06.410] service "exposemetadata" deleted
I1009 05:51:06.418] service "testmetadata" deleted
W1009 05:51:06.519] E1009 05:51:06.372492   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:06.519] E1009 05:51:06.500502   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:06.620] deployment.apps "testmetadata" deleted
I1009 05:51:06.620] +++ exit code: 0
I1009 05:51:06.654] Recording: run_daemonset_tests
I1009 05:51:06.654] Running command: run_daemonset_tests
I1009 05:51:06.697] 
I1009 05:51:06.700] +++ Running case: test-cmd.run_daemonset_tests 
I1009 05:51:06.704] +++ working dir: /go/src/k8s.io/kubernetes
I1009 05:51:06.708] +++ command: run_daemonset_tests
I1009 05:51:06.725] +++ [1009 05:51:06] Creating namespace namespace-1570600266-14679
I1009 05:51:06.821] namespace/namespace-1570600266-14679 created
W1009 05:51:06.923] E1009 05:51:06.661864   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:07.024] Context "test" modified.
I1009 05:51:07.024] +++ [1009 05:51:06] Testing kubectl(v1:daemonsets)
I1009 05:51:07.073] apps.sh:30: Successful get daemonsets {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:51:07.331] daemonset.apps/bind created
W1009 05:51:07.432] E1009 05:51:07.215325   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:07.433] I1009 05:51:07.327243   49360 controller.go:606] quota admission added evaluator for: daemonsets.apps
W1009 05:51:07.433] I1009 05:51:07.340578   49360 controller.go:606] quota admission added evaluator for: controllerrevisions.apps
W1009 05:51:07.434] E1009 05:51:07.375206   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:07.504] E1009 05:51:07.503446   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:07.605] apps.sh:34: Successful get daemonsets bind {{.metadata.generation}}: 1
I1009 05:51:07.775] daemonset.apps/bind configured
W1009 05:51:07.876] E1009 05:51:07.663852   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:07.977] apps.sh:37: Successful get daemonsets bind {{.metadata.generation}}: 1
I1009 05:51:08.057] daemonset.apps/bind image updated
I1009 05:51:08.197] apps.sh:40: Successful get daemonsets bind {{.metadata.generation}}: 2
I1009 05:51:08.323] daemonset.apps/bind env updated
W1009 05:51:08.424] E1009 05:51:08.217707   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:08.425] E1009 05:51:08.377345   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:08.506] E1009 05:51:08.505594   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:08.607] apps.sh:42: Successful get daemonsets bind {{.metadata.generation}}: 3
I1009 05:51:08.607] daemonset.apps/bind resource requirements updated
W1009 05:51:08.709] E1009 05:51:08.665607   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:08.809] apps.sh:44: Successful get daemonsets bind {{.metadata.generation}}: 4
I1009 05:51:08.849] daemonset.apps/bind restarted
I1009 05:51:08.986] apps.sh:48: Successful get daemonsets bind {{.metadata.generation}}: 5
I1009 05:51:09.102] daemonset.apps "bind" deleted
I1009 05:51:09.146] +++ exit code: 0
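The apps.sh:34-48 assertions above watch .metadata.generation increase as the daemonset spec is mutated and restarted. Reconstructed outside the log, with the container name and values assumed, the mutations are roughly:

  # Each spec change bumps .metadata.generation by one.
  kubectl set image daemonset/bind kubernetes-pause=k8s.gcr.io/pause:latest
  kubectl set env daemonset/bind foo=bar
  kubectl set resources daemonset/bind --limits=cpu=200m,memory=512Mi
  kubectl rollout restart daemonset/bind
  # Read the generation back with a Go template.
  kubectl get daemonset bind -o go-template='{{.metadata.generation}}'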
I1009 05:51:09.207] Recording: run_daemonset_history_tests
I1009 05:51:09.207] Running command: run_daemonset_history_tests
I1009 05:51:09.255] 
I1009 05:51:09.258] +++ Running case: test-cmd.run_daemonset_history_tests 
I1009 05:51:09.263] +++ working dir: /go/src/k8s.io/kubernetes
I1009 05:51:09.266] +++ command: run_daemonset_history_tests
I1009 05:51:09.286] +++ [1009 05:51:09] Creating namespace namespace-1570600269-13470
W1009 05:51:09.387] E1009 05:51:09.220559   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:09.388] E1009 05:51:09.379117   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:09.488] namespace/namespace-1570600269-13470 created
I1009 05:51:09.516] Context "test" modified.
I1009 05:51:09.534] +++ [1009 05:51:09] Testing kubectl(v1:daemonsets, v1:controllerrevisions)
W1009 05:51:09.635] E1009 05:51:09.508168   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:09.668] E1009 05:51:09.667534   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:09.769] apps.sh:66: Successful get daemonsets {{range.items}}{{.metadata.name}}:{{end}}: 
I1009 05:51:09.927] daemonset.apps/bind created
I1009 05:51:10.108] apps.sh:70: Successful get controllerrevisions {{range.items}}{{.metadata.annotations}}:{{end}}: map[deprecated.daemonset.template.generation:1 kubectl.kubernetes.io/last-applied-configuration:{"apiVersion":"apps/v1","kind":"DaemonSet","metadata":{"annotations":{"kubernetes.io/change-cause":"kubectl apply --filename=hack/testdata/rollingupdate-daemonset.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true"},"labels":{"service":"bind"},"name":"bind","namespace":"namespace-1570600269-13470"},"spec":{"selector":{"matchLabels":{"service":"bind"}},"template":{"metadata":{"labels":{"service":"bind"}},"spec":{"affinity":{"podAntiAffinity":{"requiredDuringSchedulingIgnoredDuringExecution":[{"labelSelector":{"matchExpressions":[{"key":"service","operator":"In","values":["bind"]}]},"namespaces":[],"topologyKey":"kubernetes.io/hostname"}]}},"containers":[{"image":"k8s.gcr.io/pause:2.0","name":"kubernetes-pause"}]}},"updateStrategy":{"rollingUpdate":{"maxUnavailable":"10%"},"type":"RollingUpdate"}}}
I1009 05:51:10.109]  kubernetes.io/change-cause:kubectl apply --filename=hack/testdata/rollingupdate-daemonset.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true]:
I1009 05:51:10.255] daemonset.apps/bind skipped rollback (current template already matches revision 1)
W1009 05:51:10.356] E1009 05:51:10.222423   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:10.381] E1009 05:51:10.380992   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:10.482] apps.sh:73: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
I1009 05:51:10.572] apps.sh:74: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I1009 05:51:10.862] daemonset.apps/bind configured
W1009 05:51:10.963] E1009 05:51:10.510658   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:10.964] E1009 05:51:10.669878   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1009 05:51:11.065] apps.sh:77: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:latest:
I1009 05:51:11.148] apps.sh:78: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I1009 05:51:11.280] apps.sh:79: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
I1009 05:51:11.414] apps.sh:80: Successful get controllerrevisions {{range.items}}{{.metadata.annotations}}:{{end}}: map[deprecated.daemonset.template.generation:1 kubectl.kubernetes.io/last-applied-configuration:{"apiVersion":"apps/v1","kind":"DaemonSet","metadata":{"annotations":{"kubernetes.io/change-cause":"kubectl apply --filename=hack/testdata/rollingupdate-daemonset.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true"},"labels":{"service":"bind"},"name":"bind","namespace":"namespace-1570600269-13470"},"spec":{"selector":{"matchLabels":{"service":"bind"}},"template":{"metadata":{"labels":{"service":"bind"}},"spec":{"affinity":{"podAntiAffinity":{"requiredDuringSchedulingIgnoredDuringExecution":[{"labelSelector":{"matchExpressions":[{"key":"service","operator":"In","values":["bind"]}]},"namespaces":[],"topologyKey":"kubernetes.io/hostname"}]}},"containers":[{"image":"k8s.gcr.io/pause:2.0","name":"kubernetes-pause"}]}},"updateStrategy":{"rollingUpdate":{"maxUnavailable":"10%"},"type":"RollingUpdate"}}}
I1009 05:51:11.415]  kubernetes.io/change-cause:kubectl apply --filename=hack/testdata/rollingupdate-daemonset.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true]:map[deprecated.daemonset.template.generation:2 kubectl.kubernetes.io/last-applied-configuration:{"apiVersion":"apps/v1","kind":"DaemonSet","metadata":{"annotations":{"kubernetes.io/change-cause":"kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true"},"labels":{"service":"bind"},"name":"bind","namespace":"namespace-1570600269-13470"},"spec":{"selector":{"matchLabels":{"service":"bind"}},"template":{"metadata":{"labels":{"service":"bind"}},"spec":{"affinity":{"podAntiAffinity":{"requiredDuringSchedulingIgnoredDuringExecution":[{"labelSelector":{"matchExpressions":[{"key":"service","operator":"In","values":["bind"]}]},"namespaces":[],"topologyKey":"kubernetes.io/hostname"}]}},"containers":[{"image":"k8s.gcr.io/pause:latest","name":"kubernetes-pause"},{"image":"k8s.gcr.io/nginx:test-cmd","name":"app"}]}},"updateStrategy":{"rollingUpdate":{"maxUnavailable":"10%"},"type":"RollingUpdate"}}}
I1009 05:51:11.415]  kubernetes.io/change-cause:kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true]:
... skipping 5 lines ...
I1009 05:51:11.548]     Port:	<none>
I1009 05:51:11.548]     Host Port:	<none>
I1009 05:51:11.548]     Environment:	<none>
I1009 05:51:11.548]     Mounts:	<none>
I1009 05:51:11.548]   Volumes:	<none>
I1009 05:51:11.548]  (dry run)
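The controllerrevision annotations above carry the kubernetes.io/change-cause recorded by the --record=true applies, while the "skipped rollback" and "(dry run)" lines come from rollout commands. A hedged sketch of that flow (file paths taken from the log, other flags assumed):

  # Two recorded applies produce revisions 1 and 2, then the history is queried
  # and rolled back.
  kubectl apply -f hack/testdata/rollingupdate-daemonset.yaml --record=true
  kubectl apply -f hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true
  kubectl rollout history daemonset/bind
  kubectl rollout undo daemonset/bind --to-revision=1 --dry-run
  kubectl rollout undo daemonset/bind --to-revision=1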
W1009 05:51:11.649] E1009 05:51:11.224321   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
W1009 05:51:11.650] E1009 05:51:11.383061   52917 reflector.go:153] k8s.io/client-go/metadata/metadatainformer/informer.go:89: Failed to list *v1.PartialObjectMetadata: the server could not find the requested resource