This job view page is being replaced by Spyglass soon. Check out the new job view.
PR vinayakankugoyal: Run etcd as non root.
Result: FAILURE
Tests: 1 failed / 2930 succeeded
Started: 2020-08-13 01:24
Elapsed: 38m43s
Revision: cd1cb2dd4903e6605ed006dbf4fc0d2e9260105e
Refs 93455

Test Failures


k8s.io/kubernetes/test/integration/auth TestDynamicClientBuilder 25s

go test -v k8s.io/kubernetes/test/integration/auth -run TestDynamicClientBuilder$
=== RUN   TestDynamicClientBuilder
I0813 01:50:53.079317  114703 feature_gate.go:243] feature gates: &{map[TokenRequest:true]}
I0813 01:50:53.642230  114703 client.go:360] parsed scheme: "passthrough"
I0813 01:50:53.642288  114703 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{http://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
I0813 01:50:53.642302  114703 clientconn.go:948] ClientConn switching balancer to "pick_first"
I0813 01:50:53.642550  114703 balancer_conn_wrappers.go:78] pickfirstBalancer: HandleSubConnStateChange: 0xc0257a7c60, {CONNECTING <nil>}
I0813 01:50:53.643088  114703 balancer_conn_wrappers.go:78] pickfirstBalancer: HandleSubConnStateChange: 0xc0257a7c60, {READY <nil>}
I0813 01:50:53.643899  114703 controlbuf.go:508] transport: loopyWriter.run returning. connection error: desc = "transport is closing"
I0813 01:50:54.411557  114703 plugins.go:84] Registered admission plugin "NamespaceLifecycle"
I0813 01:50:54.411597  114703 plugins.go:84] Registered admission plugin "ValidatingAdmissionWebhook"
I0813 01:50:54.411605  114703 plugins.go:84] Registered admission plugin "MutatingAdmissionWebhook"
I0813 01:50:54.411612  114703 plugins.go:84] Registered admission plugin "AlwaysAdmit"
I0813 01:50:54.411618  114703 plugins.go:84] Registered admission plugin "AlwaysPullImages"
I0813 01:50:54.411624  114703 plugins.go:84] Registered admission plugin "LimitPodHardAntiAffinityTopology"
I0813 01:50:54.411631  114703 plugins.go:84] Registered admission plugin "DefaultTolerationSeconds"
I0813 01:50:54.411637  114703 plugins.go:84] Registered admission plugin "DefaultIngressClass"
I0813 01:50:54.411646  114703 plugins.go:84] Registered admission plugin "AlwaysDeny"
I0813 01:50:54.411656  114703 plugins.go:84] Registered admission plugin "EventRateLimit"
I0813 01:50:54.411663  114703 plugins.go:84] Registered admission plugin "DenyEscalatingExec"
I0813 01:50:54.411668  114703 plugins.go:84] Registered admission plugin "DenyExecOnPrivileged"
I0813 01:50:54.411675  114703 plugins.go:84] Registered admission plugin "ExtendedResourceToleration"
I0813 01:50:54.411681  114703 plugins.go:84] Registered admission plugin "OwnerReferencesPermissionEnforcement"
I0813 01:50:54.411695  114703 plugins.go:84] Registered admission plugin "ImagePolicyWebhook"
I0813 01:50:54.411701  114703 plugins.go:84] Registered admission plugin "LimitRanger"
I0813 01:50:54.411708  114703 plugins.go:84] Registered admission plugin "NamespaceAutoProvision"
I0813 01:50:54.411714  114703 plugins.go:84] Registered admission plugin "NamespaceExists"
I0813 01:50:54.411720  114703 plugins.go:84] Registered admission plugin "NodeRestriction"
I0813 01:50:54.411727  114703 plugins.go:84] Registered admission plugin "TaintNodesByCondition"
I0813 01:50:54.411733  114703 plugins.go:84] Registered admission plugin "PersistentVolumeLabel"
I0813 01:50:54.411739  114703 plugins.go:84] Registered admission plugin "PodNodeSelector"
I0813 01:50:54.411749  114703 plugins.go:84] Registered admission plugin "PodPreset"
I0813 01:50:54.411755  114703 plugins.go:84] Registered admission plugin "PodTolerationRestriction"
I0813 01:50:54.411761  114703 plugins.go:84] Registered admission plugin "RuntimeClass"
I0813 01:50:54.411768  114703 plugins.go:84] Registered admission plugin "ResourceQuota"
I0813 01:50:54.411795  114703 plugins.go:84] Registered admission plugin "PodSecurityPolicy"
I0813 01:50:54.411805  114703 plugins.go:84] Registered admission plugin "Priority"
I0813 01:50:54.411812  114703 plugins.go:84] Registered admission plugin "SecurityContextDeny"
I0813 01:50:54.411826  114703 plugins.go:84] Registered admission plugin "ServiceAccount"
I0813 01:50:54.411832  114703 plugins.go:84] Registered admission plugin "DefaultStorageClass"
I0813 01:50:54.411838  114703 plugins.go:84] Registered admission plugin "PersistentVolumeClaimResize"
I0813 01:50:54.411845  114703 plugins.go:84] Registered admission plugin "StorageObjectInUseProtection"
I0813 01:50:54.411851  114703 plugins.go:84] Registered admission plugin "CertificateApproval"
I0813 01:50:54.411856  114703 plugins.go:84] Registered admission plugin "CertificateSigning"
I0813 01:50:54.411862  114703 plugins.go:84] Registered admission plugin "CertificateSubjectRestriction"
I0813 01:50:54.412203  114703 interface.go:400] Looking for default routes with IPv4 addresses
I0813 01:50:54.412224  114703 interface.go:405] Default route transits interface "eth0"
I0813 01:50:54.412420  114703 interface.go:208] Interface eth0 is up
I0813 01:50:54.412544  114703 interface.go:256] Interface "eth0" has 2 addresses :[10.60.65.31/24 fe80::30c6:3dff:fe5b:7c3e/64].
I0813 01:50:54.412574  114703 interface.go:223] Checking addr  10.60.65.31/24.
I0813 01:50:54.412585  114703 interface.go:230] IP found 10.60.65.31
I0813 01:50:54.412593  114703 interface.go:262] Found valid IPv4 address 10.60.65.31 for interface "eth0".
I0813 01:50:54.412601  114703 interface.go:411] Found active IP 10.60.65.31 
I0813 01:50:54.412618  114703 services.go:51] Setting service IP to "10.0.0.1" (read-write).
I0813 01:50:55.753903  114703 serving.go:325] Generated self-signed cert (/tmp/test-integration-TestDynamicClientBuilder179258641/apiserver.crt, /tmp/test-integration-TestDynamicClientBuilder179258641/apiserver.key)
I0813 01:50:55.753941  114703 server.go:625] external host was not specified, using 10.60.65.31
I0813 01:50:55.770888  114703 dynamic_serving_content.go:111] Loaded a new cert/key pair for "serving-cert::/tmp/test-integration-TestDynamicClientBuilder179258641/apiserver.crt::/tmp/test-integration-TestDynamicClientBuilder179258641/apiserver.key"
I0813 01:50:56.868482  114703 dynamic_cafile_content.go:129] Loaded a new CA Bundle and Verifier for "client-ca-bundle::/tmp/test-integration-TestDynamicClientBuilder179258641/client-ca.crt209897323"
I0813 01:50:56.868667  114703 dynamic_cafile_content.go:129] Loaded a new CA Bundle and Verifier for "request-header::/tmp/test-integration-TestDynamicClientBuilder179258641/proxy-ca.crt909360956"
W0813 01:50:56.868710  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.868724  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.868757  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.869089  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.869159  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.869178  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.869276  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.869287  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.869297  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.869310  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.869325  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.870525  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.870655  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.870742  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.870807  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.870914  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.871006  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.871327  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.871618  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:50:56.871789  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
I0813 01:50:56.871860  114703 plugins.go:158] Loaded 11 mutating admission controller(s) successfully in the following order: NamespaceLifecycle,LimitRanger,ServiceAccount,TaintNodesByCondition,Priority,DefaultTolerationSeconds,DefaultStorageClass,StorageObjectInUseProtection,RuntimeClass,DefaultIngressClass,MutatingAdmissionWebhook.
I0813 01:50:56.871901  114703 plugins.go:161] Loaded 10 validating admission controller(s) successfully in the following order: LimitRanger,ServiceAccount,Priority,PersistentVolumeClaimResize,RuntimeClass,CertificateApproval,CertificateSigning,CertificateSubjectRestriction,ValidatingAdmissionWebhook,ResourceQuota.
I0813 01:50:56.871988  114703 services.go:51] Setting service IP to "10.0.0.1" (read-write).
I0813 01:50:56.872249  114703 dynamic_cafile_content.go:129] Loaded a new CA Bundle and Verifier for "client-ca-bundle::/tmp/test-integration-TestDynamicClientBuilder179258641/client-ca.crt209897323"
I0813 01:50:56.872391  114703 dynamic_cafile_content.go:129] Loaded a new CA Bundle and Verifier for "request-header::/tmp/test-integration-TestDynamicClientBuilder179258641/proxy-ca.crt909360956"
I0813 01:50:56.872502  114703 services.go:51] Setting service IP to "10.0.0.1" (read-write).
I0813 01:50:56.872515  114703 master.go:271] Using reconciler: lease
I0813 01:50:56.872622  114703 storage_factory.go:285] storing apiServerIPInfo in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.872785  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.873357  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.885532  114703 config.go:637] Not requested to run hook priority-and-fairness-config-consumer
I0813 01:50:56.887308  114703 storage_factory.go:285] storing podtemplates in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.887499  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.887528  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.889796  114703 store.go:1378] Monitoring podtemplates count at <storage-prefix>//podtemplates
I0813 01:50:56.890050  114703 storage_factory.go:285] storing events in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.890633  114703 reflector.go:243] Listing and watching *core.PodTemplate from storage/cacher.go:/podtemplates
I0813 01:50:56.890723  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.890845  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.892174  114703 cacher.go:402] cacher (*core.PodTemplate): initialized
I0813 01:50:56.892317  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:56.892505  114703 store.go:1378] Monitoring events count at <storage-prefix>//events
I0813 01:50:56.892672  114703 storage_factory.go:285] storing limitranges in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.892690  114703 reflector.go:243] Listing and watching *core.Event from storage/cacher.go:/events
I0813 01:50:56.893029  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.893136  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.894910  114703 store.go:1378] Monitoring limitranges count at <storage-prefix>//limitranges
I0813 01:50:56.895132  114703 reflector.go:243] Listing and watching *core.LimitRange from storage/cacher.go:/limitranges
I0813 01:50:56.895120  114703 storage_factory.go:285] storing resourcequotas in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.895437  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.895471  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.895836  114703 cacher.go:402] cacher (*core.Event): initialized
I0813 01:50:56.895915  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:56.896474  114703 store.go:1378] Monitoring resourcequotas count at <storage-prefix>//resourcequotas
I0813 01:50:56.896623  114703 reflector.go:243] Listing and watching *core.ResourceQuota from storage/cacher.go:/resourcequotas
I0813 01:50:56.896509  114703 cacher.go:402] cacher (*core.LimitRange): initialized
I0813 01:50:56.897328  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:56.897426  114703 storage_factory.go:285] storing secrets in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.897737  114703 cacher.go:402] cacher (*core.ResourceQuota): initialized
I0813 01:50:56.897751  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:56.898111  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.898215  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.899146  114703 store.go:1378] Monitoring secrets count at <storage-prefix>//secrets
I0813 01:50:56.899300  114703 reflector.go:243] Listing and watching *core.Secret from storage/cacher.go:/secrets
I0813 01:50:56.899317  114703 storage_factory.go:285] storing persistentvolumes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.899586  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.899603  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.901998  114703 cacher.go:402] cacher (*core.Secret): initialized
I0813 01:50:56.902016  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:56.902341  114703 store.go:1378] Monitoring persistentvolumes count at <storage-prefix>//persistentvolumes
I0813 01:50:56.902505  114703 storage_factory.go:285] storing persistentvolumeclaims in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.902585  114703 reflector.go:243] Listing and watching *core.PersistentVolume from storage/cacher.go:/persistentvolumes
I0813 01:50:56.902606  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.902624  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.903280  114703 store.go:1378] Monitoring persistentvolumeclaims count at <storage-prefix>//persistentvolumeclaims
I0813 01:50:56.903370  114703 reflector.go:243] Listing and watching *core.PersistentVolumeClaim from storage/cacher.go:/persistentvolumeclaims
I0813 01:50:56.903453  114703 storage_factory.go:285] storing configmaps in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.903572  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.903590  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.903913  114703 cacher.go:402] cacher (*core.PersistentVolume): initialized
I0813 01:50:56.903927  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:56.904128  114703 store.go:1378] Monitoring configmaps count at <storage-prefix>//configmaps
I0813 01:50:56.904306  114703 cacher.go:402] cacher (*core.PersistentVolumeClaim): initialized
I0813 01:50:56.904331  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:56.904500  114703 storage_factory.go:285] storing namespaces in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.904221  114703 reflector.go:243] Listing and watching *core.ConfigMap from storage/cacher.go:/configmaps
I0813 01:50:56.904692  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.904794  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.905767  114703 store.go:1378] Monitoring namespaces count at <storage-prefix>//namespaces
I0813 01:50:56.905876  114703 reflector.go:243] Listing and watching *core.Namespace from storage/cacher.go:/namespaces
I0813 01:50:56.906039  114703 storage_factory.go:285] storing endpoints in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.906199  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.906215  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.906400  114703 cacher.go:402] cacher (*core.ConfigMap): initialized
I0813 01:50:56.906435  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:56.908600  114703 cacher.go:402] cacher (*core.Namespace): initialized
I0813 01:50:56.908613  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:56.909455  114703 store.go:1378] Monitoring endpoints count at <storage-prefix>//services/endpoints
I0813 01:50:56.909540  114703 reflector.go:243] Listing and watching *core.Endpoints from storage/cacher.go:/services/endpoints
I0813 01:50:56.909640  114703 storage_factory.go:285] storing nodes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.909778  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.909799  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.910569  114703 cacher.go:402] cacher (*core.Endpoints): initialized
I0813 01:50:56.910586  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:56.912028  114703 store.go:1378] Monitoring nodes count at <storage-prefix>//minions
I0813 01:50:56.912146  114703 reflector.go:243] Listing and watching *core.Node from storage/cacher.go:/minions
I0813 01:50:56.912814  114703 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.913435  114703 cacher.go:402] cacher (*core.Node): initialized
I0813 01:50:56.913544  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:56.913464  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.913869  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.915648  114703 store.go:1378] Monitoring pods count at <storage-prefix>//pods
I0813 01:50:56.915843  114703 storage_factory.go:285] storing serviceaccounts in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.915942  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.915958  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.916015  114703 reflector.go:243] Listing and watching *core.Pod from storage/cacher.go:/pods
I0813 01:50:56.917190  114703 cacher.go:402] cacher (*core.Pod): initialized
I0813 01:50:56.917339  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:56.919406  114703 store.go:1378] Monitoring serviceaccounts count at <storage-prefix>//serviceaccounts
I0813 01:50:56.919533  114703 storage_factory.go:285] storing services in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.919640  114703 reflector.go:243] Listing and watching *core.ServiceAccount from storage/cacher.go:/serviceaccounts
I0813 01:50:56.919780  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.919836  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.921282  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.921312  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.921798  114703 cacher.go:402] cacher (*core.ServiceAccount): initialized
I0813 01:50:56.921810  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:56.922285  114703 storage_factory.go:285] storing replicationcontrollers in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.922389  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.922406  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.923261  114703 store.go:1378] Monitoring replicationcontrollers count at <storage-prefix>//controllers
I0813 01:50:56.923501  114703 storage_factory.go:285] storing services in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:56.923759  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:56.924128  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:56.923917  114703 reflector.go:243] Listing and watching *core.ReplicationController from storage/cacher.go:/controllers
I0813 01:50:56.926378  114703 store.go:1378] Monitoring services count at <storage-prefix>//services/specs
I0813 01:50:56.927512  114703 reflector.go:243] Listing and watching *core.Service from storage/cacher.go:/services/specs
I0813 01:50:56.929127  114703 cacher.go:402] cacher (*core.ReplicationController): initialized
I0813 01:50:56.929238  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:56.930369  114703 cacher.go:402] cacher (*core.Service): initialized
I0813 01:50:56.930481  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.032515  114703 storage_factory.go:285] storing bindings in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.034458  114703 storage_factory.go:285] storing componentstatuses in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.036573  114703 storage_factory.go:285] storing configmaps in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.038977  114703 storage_factory.go:285] storing endpoints in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.041079  114703 storage_factory.go:285] storing events in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.043118  114703 storage_factory.go:285] storing limitranges in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.044866  114703 storage_factory.go:285] storing namespaces in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.046398  114703 storage_factory.go:285] storing namespaces in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.048003  114703 storage_factory.go:285] storing namespaces in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.049929  114703 storage_factory.go:285] storing nodes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.054220  114703 storage_factory.go:285] storing nodes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.056139  114703 storage_factory.go:285] storing nodes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.058748  114703 storage_factory.go:285] storing persistentvolumeclaims in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.060517  114703 storage_factory.go:285] storing persistentvolumeclaims in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.062578  114703 storage_factory.go:285] storing persistentvolumes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.064151  114703 storage_factory.go:285] storing persistentvolumes in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.066193  114703 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.067842  114703 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.069348  114703 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.070793  114703 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.072238  114703 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.073743  114703 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.075179  114703 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.077209  114703 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.078977  114703 storage_factory.go:285] storing pods in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.081131  114703 storage_factory.go:285] storing podtemplates in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.083379  114703 storage_factory.go:285] storing replicationcontrollers in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.084981  114703 storage_factory.go:285] storing replicationcontrollers in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.087964  114703 storage_factory.go:285] storing replicationcontrollers in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.090140  114703 storage_factory.go:285] storing resourcequotas in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.091794  114703 storage_factory.go:285] storing resourcequotas in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.093869  114703 storage_factory.go:285] storing secrets in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.096041  114703 storage_factory.go:285] storing serviceaccounts in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.097661  114703 storage_factory.go:285] storing serviceaccounts in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.099646  114703 storage_factory.go:285] storing services in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.102304  114703 storage_factory.go:285] storing services in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.103930  114703 storage_factory.go:285] storing services in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.104056  114703 master.go:539] Enabling API group "authentication.k8s.io".
I0813 01:50:57.104073  114703 master.go:539] Enabling API group "authorization.k8s.io".
I0813 01:50:57.104303  114703 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.104474  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.104502  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.106906  114703 store.go:1378] Monitoring horizontalpodautoscalers.autoscaling count at <storage-prefix>//horizontalpodautoscalers
I0813 01:50:57.107016  114703 reflector.go:243] Listing and watching *autoscaling.HorizontalPodAutoscaler from storage/cacher.go:/horizontalpodautoscalers
I0813 01:50:57.107112  114703 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.107271  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.107295  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.108205  114703 store.go:1378] Monitoring horizontalpodautoscalers.autoscaling count at <storage-prefix>//horizontalpodautoscalers
I0813 01:50:57.108430  114703 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.108612  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.108642  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.109263  114703 reflector.go:243] Listing and watching *autoscaling.HorizontalPodAutoscaler from storage/cacher.go:/horizontalpodautoscalers
I0813 01:50:57.109968  114703 cacher.go:402] cacher (*autoscaling.HorizontalPodAutoscaler): initialized
I0813 01:50:57.109982  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.110444  114703 store.go:1378] Monitoring horizontalpodautoscalers.autoscaling count at <storage-prefix>//horizontalpodautoscalers
I0813 01:50:57.110467  114703 master.go:539] Enabling API group "autoscaling".
I0813 01:50:57.110652  114703 storage_factory.go:285] storing jobs.batch in batch/v1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.110806  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.110829  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.110907  114703 reflector.go:243] Listing and watching *autoscaling.HorizontalPodAutoscaler from storage/cacher.go:/horizontalpodautoscalers
I0813 01:50:57.111190  114703 cacher.go:402] cacher (*autoscaling.HorizontalPodAutoscaler): initialized
I0813 01:50:57.111203  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.112712  114703 store.go:1378] Monitoring jobs.batch count at <storage-prefix>//jobs
I0813 01:50:57.112877  114703 cacher.go:402] cacher (*autoscaling.HorizontalPodAutoscaler): initialized
I0813 01:50:57.112893  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.113349  114703 reflector.go:243] Listing and watching *batch.Job from storage/cacher.go:/jobs
I0813 01:50:57.113391  114703 storage_factory.go:285] storing cronjobs.batch in batch/v1beta1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.113568  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.113587  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.115984  114703 store.go:1378] Monitoring cronjobs.batch count at <storage-prefix>//cronjobs
I0813 01:50:57.116010  114703 master.go:539] Enabling API group "batch".
I0813 01:50:57.116044  114703 reflector.go:243] Listing and watching *batch.CronJob from storage/cacher.go:/cronjobs
I0813 01:50:57.116199  114703 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.116305  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.116324  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.116957  114703 cacher.go:402] cacher (*batch.Job): initialized
I0813 01:50:57.117271  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.119536  114703 cacher.go:402] cacher (*batch.CronJob): initialized
I0813 01:50:57.119553  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.119810  114703 store.go:1378] Monitoring certificatesigningrequests.certificates.k8s.io count at <storage-prefix>//certificatesigningrequests
I0813 01:50:57.119917  114703 reflector.go:243] Listing and watching *certificates.CertificateSigningRequest from storage/cacher.go:/certificatesigningrequests
I0813 01:50:57.119989  114703 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.120170  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.120196  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.120704  114703 cacher.go:402] cacher (*certificates.CertificateSigningRequest): initialized
I0813 01:50:57.120718  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.122975  114703 store.go:1378] Monitoring certificatesigningrequests.certificates.k8s.io count at <storage-prefix>//certificatesigningrequests
I0813 01:50:57.122999  114703 master.go:539] Enabling API group "certificates.k8s.io".
I0813 01:50:57.123043  114703 reflector.go:243] Listing and watching *certificates.CertificateSigningRequest from storage/cacher.go:/certificatesigningrequests
I0813 01:50:57.123183  114703 storage_factory.go:285] storing leases.coordination.k8s.io in coordination.k8s.io/v1beta1, reading as coordination.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.123393  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.123415  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.124698  114703 store.go:1378] Monitoring leases.coordination.k8s.io count at <storage-prefix>//leases
I0813 01:50:57.124907  114703 storage_factory.go:285] storing leases.coordination.k8s.io in coordination.k8s.io/v1beta1, reading as coordination.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.125032  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.125051  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.125285  114703 reflector.go:243] Listing and watching *coordination.Lease from storage/cacher.go:/leases
I0813 01:50:57.125453  114703 cacher.go:402] cacher (*certificates.CertificateSigningRequest): initialized
I0813 01:50:57.125485  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.126167  114703 cacher.go:402] cacher (*coordination.Lease): initialized
I0813 01:50:57.126186  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.126439  114703 store.go:1378] Monitoring leases.coordination.k8s.io count at <storage-prefix>//leases
I0813 01:50:57.126456  114703 master.go:539] Enabling API group "coordination.k8s.io".
I0813 01:50:57.126496  114703 reflector.go:243] Listing and watching *coordination.Lease from storage/cacher.go:/leases
I0813 01:50:57.126639  114703 storage_factory.go:285] storing endpointslices.discovery.k8s.io in discovery.k8s.io/v1beta1, reading as discovery.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.126769  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.127600  114703 cacher.go:402] cacher (*coordination.Lease): initialized
I0813 01:50:57.127622  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.127847  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.128806  114703 store.go:1378] Monitoring endpointslices.discovery.k8s.io count at <storage-prefix>//endpointslices
I0813 01:50:57.128861  114703 master.go:539] Enabling API group "discovery.k8s.io".
I0813 01:50:57.128945  114703 reflector.go:243] Listing and watching *discovery.EndpointSlice from storage/cacher.go:/endpointslices
I0813 01:50:57.129054  114703 storage_factory.go:285] storing ingresses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.129203  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.129222  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.130797  114703 store.go:1378] Monitoring ingresses.networking.k8s.io count at <storage-prefix>//ingress
I0813 01:50:57.130819  114703 master.go:539] Enabling API group "extensions".
I0813 01:50:57.130968  114703 storage_factory.go:285] storing networkpolicies.networking.k8s.io in networking.k8s.io/v1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.131066  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.131083  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.131251  114703 reflector.go:243] Listing and watching *networking.Ingress from storage/cacher.go:/ingress
I0813 01:50:57.132109  114703 cacher.go:402] cacher (*discovery.EndpointSlice): initialized
I0813 01:50:57.132122  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.136667  114703 store.go:1378] Monitoring networkpolicies.networking.k8s.io count at <storage-prefix>//networkpolicies
I0813 01:50:57.136802  114703 reflector.go:243] Listing and watching *networking.NetworkPolicy from storage/cacher.go:/networkpolicies
I0813 01:50:57.137459  114703 storage_factory.go:285] storing ingresses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.137644  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.137665  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.137968  114703 cacher.go:402] cacher (*networking.NetworkPolicy): initialized
I0813 01:50:57.137982  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.139101  114703 store.go:1378] Monitoring ingresses.networking.k8s.io count at <storage-prefix>//ingress
I0813 01:50:57.139213  114703 cacher.go:402] cacher (*networking.Ingress): initialized
I0813 01:50:57.139346  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.139289  114703 reflector.go:243] Listing and watching *networking.Ingress from storage/cacher.go:/ingress
I0813 01:50:57.139528  114703 storage_factory.go:285] storing ingressclasses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.139707  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.139725  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.140629  114703 cacher.go:402] cacher (*networking.Ingress): initialized
I0813 01:50:57.140653  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.141401  114703 store.go:1378] Monitoring ingressclasses.networking.k8s.io count at <storage-prefix>//ingressclasses
I0813 01:50:57.141593  114703 storage_factory.go:285] storing ingresses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.141807  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.141830  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.142046  114703 reflector.go:243] Listing and watching *networking.IngressClass from storage/cacher.go:/ingressclasses
I0813 01:50:57.142741  114703 store.go:1378] Monitoring ingresses.networking.k8s.io count at <storage-prefix>//ingress
I0813 01:50:57.142921  114703 storage_factory.go:285] storing ingressclasses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.143066  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.143085  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.143285  114703 reflector.go:243] Listing and watching *networking.Ingress from storage/cacher.go:/ingress
I0813 01:50:57.145509  114703 cacher.go:402] cacher (*networking.IngressClass): initialized
I0813 01:50:57.145524  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.145872  114703 store.go:1378] Monitoring ingressclasses.networking.k8s.io count at <storage-prefix>//ingressclasses
I0813 01:50:57.147276  114703 master.go:539] Enabling API group "networking.k8s.io".
I0813 01:50:57.147326  114703 cacher.go:402] cacher (*networking.Ingress): initialized
I0813 01:50:57.147341  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.147485  114703 storage_factory.go:285] storing runtimeclasses.node.k8s.io in node.k8s.io/v1beta1, reading as node.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.147648  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.147677  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.147736  114703 reflector.go:243] Listing and watching *networking.IngressClass from storage/cacher.go:/ingressclasses
I0813 01:50:57.148548  114703 store.go:1378] Monitoring runtimeclasses.node.k8s.io count at <storage-prefix>//runtimeclasses
I0813 01:50:57.148566  114703 master.go:539] Enabling API group "node.k8s.io".
I0813 01:50:57.149329  114703 storage_factory.go:285] storing poddisruptionbudgets.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.149524  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.149542  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.149620  114703 reflector.go:243] Listing and watching *node.RuntimeClass from storage/cacher.go:/runtimeclasses
I0813 01:50:57.150112  114703 cacher.go:402] cacher (*networking.IngressClass): initialized
I0813 01:50:57.150122  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.151120  114703 store.go:1378] Monitoring poddisruptionbudgets.policy count at <storage-prefix>//poddisruptionbudgets
I0813 01:50:57.151311  114703 cacher.go:402] cacher (*node.RuntimeClass): initialized
I0813 01:50:57.151326  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.151425  114703 storage_factory.go:285] storing podsecuritypolicies.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.151576  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.151597  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.151662  114703 reflector.go:243] Listing and watching *policy.PodDisruptionBudget from storage/cacher.go:/poddisruptionbudgets
I0813 01:50:57.155133  114703 cacher.go:402] cacher (*policy.PodDisruptionBudget): initialized
I0813 01:50:57.155152  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.155910  114703 store.go:1378] Monitoring podsecuritypolicies.policy count at <storage-prefix>//podsecuritypolicy
I0813 01:50:57.155936  114703 master.go:539] Enabling API group "policy".
I0813 01:50:57.156035  114703 storage_factory.go:285] storing roles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.156358  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.156429  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.156697  114703 reflector.go:243] Listing and watching *policy.PodSecurityPolicy from storage/cacher.go:/podsecuritypolicy
I0813 01:50:57.159287  114703 cacher.go:402] cacher (*policy.PodSecurityPolicy): initialized
I0813 01:50:57.159310  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.159693  114703 store.go:1378] Monitoring roles.rbac.authorization.k8s.io count at <storage-prefix>//roles
I0813 01:50:57.159763  114703 reflector.go:243] Listing and watching *rbac.Role from storage/cacher.go:/roles
I0813 01:50:57.159934  114703 storage_factory.go:285] storing rolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.160082  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.160102  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.162924  114703 store.go:1378] Monitoring rolebindings.rbac.authorization.k8s.io count at <storage-prefix>//rolebindings
I0813 01:50:57.163003  114703 storage_factory.go:285] storing clusterroles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.163152  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.163184  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.163357  114703 reflector.go:243] Listing and watching *rbac.RoleBinding from storage/cacher.go:/rolebindings
I0813 01:50:57.163509  114703 cacher.go:402] cacher (*rbac.Role): initialized
I0813 01:50:57.163622  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.164583  114703 store.go:1378] Monitoring clusterroles.rbac.authorization.k8s.io count at <storage-prefix>//clusterroles
I0813 01:50:57.164683  114703 reflector.go:243] Listing and watching *rbac.ClusterRole from storage/cacher.go:/clusterroles
I0813 01:50:57.165312  114703 storage_factory.go:285] storing clusterrolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.165515  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.165534  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.167756  114703 store.go:1378] Monitoring clusterrolebindings.rbac.authorization.k8s.io count at <storage-prefix>//clusterrolebindings
I0813 01:50:57.167840  114703 storage_factory.go:285] storing roles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.167863  114703 reflector.go:243] Listing and watching *rbac.ClusterRoleBinding from storage/cacher.go:/clusterrolebindings
I0813 01:50:57.168159  114703 cacher.go:402] cacher (*rbac.RoleBinding): initialized
I0813 01:50:57.168172  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.169148  114703 cacher.go:402] cacher (*rbac.ClusterRoleBinding): initialized
I0813 01:50:57.169359  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.169912  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.169945  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.173394  114703 cacher.go:402] cacher (*rbac.ClusterRole): initialized
I0813 01:50:57.173411  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.174620  114703 store.go:1378] Monitoring roles.rbac.authorization.k8s.io count at <storage-prefix>//roles
I0813 01:50:57.174871  114703 storage_factory.go:285] storing rolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.175014  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.175037  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.175332  114703 reflector.go:243] Listing and watching *rbac.Role from storage/cacher.go:/roles
I0813 01:50:57.177719  114703 store.go:1378] Monitoring rolebindings.rbac.authorization.k8s.io count at <storage-prefix>//rolebindings
I0813 01:50:57.177843  114703 storage_factory.go:285] storing clusterroles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.178007  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.178030  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.178335  114703 reflector.go:243] Listing and watching *rbac.RoleBinding from storage/cacher.go:/rolebindings
I0813 01:50:57.179484  114703 cacher.go:402] cacher (*rbac.RoleBinding): initialized
I0813 01:50:57.179497  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.180028  114703 store.go:1378] Monitoring clusterroles.rbac.authorization.k8s.io count at <storage-prefix>//clusterroles
I0813 01:50:57.180204  114703 reflector.go:243] Listing and watching *rbac.ClusterRole from storage/cacher.go:/clusterroles
I0813 01:50:57.180432  114703 storage_factory.go:285] storing clusterrolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.180612  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.180632  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.184686  114703 store.go:1378] Monitoring clusterrolebindings.rbac.authorization.k8s.io count at <storage-prefix>//clusterrolebindings
I0813 01:50:57.185277  114703 cacher.go:402] cacher (*rbac.Role): initialized
I0813 01:50:57.185293  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.185457  114703 reflector.go:243] Listing and watching *rbac.ClusterRoleBinding from storage/cacher.go:/clusterrolebindings
I0813 01:50:57.185862  114703 master.go:539] Enabling API group "rbac.authorization.k8s.io".
I0813 01:50:57.186048  114703 cacher.go:402] cacher (*rbac.ClusterRole): initialized
I0813 01:50:57.186062  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.188227  114703 cacher.go:402] cacher (*rbac.ClusterRoleBinding): initialized
I0813 01:50:57.188240  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.192982  114703 storage_factory.go:285] storing priorityclasses.scheduling.k8s.io in scheduling.k8s.io/v1, reading as scheduling.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.193436  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.193636  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.194784  114703 store.go:1378] Monitoring priorityclasses.scheduling.k8s.io count at <storage-prefix>//priorityclasses
I0813 01:50:57.195034  114703 storage_factory.go:285] storing priorityclasses.scheduling.k8s.io in scheduling.k8s.io/v1, reading as scheduling.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.195230  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.195260  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.195505  114703 reflector.go:243] Listing and watching *scheduling.PriorityClass from storage/cacher.go:/priorityclasses
I0813 01:50:57.197729  114703 cacher.go:402] cacher (*scheduling.PriorityClass): initialized
I0813 01:50:57.198028  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.198510  114703 store.go:1378] Monitoring priorityclasses.scheduling.k8s.io count at <storage-prefix>//priorityclasses
I0813 01:50:57.198546  114703 master.go:539] Enabling API group "scheduling.k8s.io".
I0813 01:50:57.198649  114703 master.go:528] Skipping disabled API group "settings.k8s.io".
I0813 01:50:57.198872  114703 storage_factory.go:285] storing storageclasses.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.199044  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.199064  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.199279  114703 reflector.go:243] Listing and watching *scheduling.PriorityClass from storage/cacher.go:/priorityclasses
I0813 01:50:57.200303  114703 store.go:1378] Monitoring storageclasses.storage.k8s.io count at <storage-prefix>//storageclasses
I0813 01:50:57.200542  114703 reflector.go:243] Listing and watching *storage.StorageClass from storage/cacher.go:/storageclasses
I0813 01:50:57.202028  114703 storage_factory.go:285] storing volumeattachments.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.202325  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.202393  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.203382  114703 cacher.go:402] cacher (*storage.StorageClass): initialized
I0813 01:50:57.203938  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.203501  114703 cacher.go:402] cacher (*scheduling.PriorityClass): initialized
I0813 01:50:57.207066  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.208413  114703 store.go:1378] Monitoring volumeattachments.storage.k8s.io count at <storage-prefix>//volumeattachments
I0813 01:50:57.208675  114703 reflector.go:243] Listing and watching *storage.VolumeAttachment from storage/cacher.go:/volumeattachments
I0813 01:50:57.209006  114703 storage_factory.go:285] storing csinodes.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.209398  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.209544  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.212335  114703 cacher.go:402] cacher (*storage.VolumeAttachment): initialized
I0813 01:50:57.212399  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.213356  114703 store.go:1378] Monitoring csinodes.storage.k8s.io count at <storage-prefix>//csinodes
I0813 01:50:57.213615  114703 storage_factory.go:285] storing csidrivers.storage.k8s.io in storage.k8s.io/v1beta1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.213845  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.213880  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.214021  114703 reflector.go:243] Listing and watching *storage.CSINode from storage/cacher.go:/csinodes
I0813 01:50:57.215417  114703 cacher.go:402] cacher (*storage.CSINode): initialized
I0813 01:50:57.215434  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.215990  114703 store.go:1378] Monitoring csidrivers.storage.k8s.io count at <storage-prefix>//csidrivers
I0813 01:50:57.216109  114703 reflector.go:243] Listing and watching *storage.CSIDriver from storage/cacher.go:/csidrivers
I0813 01:50:57.218210  114703 storage_factory.go:285] storing storageclasses.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.218582  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.218926  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.218867  114703 cacher.go:402] cacher (*storage.CSIDriver): initialized
I0813 01:50:57.219067  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.223974  114703 store.go:1378] Monitoring storageclasses.storage.k8s.io count at <storage-prefix>//storageclasses
I0813 01:50:57.224182  114703 reflector.go:243] Listing and watching *storage.StorageClass from storage/cacher.go:/storageclasses
I0813 01:50:57.224224  114703 storage_factory.go:285] storing volumeattachments.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.224382  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.224411  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.226545  114703 cacher.go:402] cacher (*storage.StorageClass): initialized
I0813 01:50:57.226758  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.226975  114703 store.go:1378] Monitoring volumeattachments.storage.k8s.io count at <storage-prefix>//volumeattachments
I0813 01:50:57.227217  114703 storage_factory.go:285] storing csinodes.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.227451  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.227932  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.227630  114703 reflector.go:243] Listing and watching *storage.VolumeAttachment from storage/cacher.go:/volumeattachments
I0813 01:50:57.230032  114703 store.go:1378] Monitoring csinodes.storage.k8s.io count at <storage-prefix>//csinodes
I0813 01:50:57.230041  114703 cacher.go:402] cacher (*storage.VolumeAttachment): initialized
I0813 01:50:57.230057  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.230442  114703 reflector.go:243] Listing and watching *storage.CSINode from storage/cacher.go:/csinodes
I0813 01:50:57.230736  114703 storage_factory.go:285] storing csidrivers.storage.k8s.io in storage.k8s.io/v1beta1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.231107  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.231194  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.233347  114703 store.go:1378] Monitoring csidrivers.storage.k8s.io count at <storage-prefix>//csidrivers
I0813 01:50:57.233536  114703 master.go:539] Enabling API group "storage.k8s.io".
I0813 01:50:57.233766  114703 master.go:528] Skipping disabled API group "flowcontrol.apiserver.k8s.io".
I0813 01:50:57.233468  114703 cacher.go:402] cacher (*storage.CSINode): initialized
I0813 01:50:57.234053  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.233499  114703 reflector.go:243] Listing and watching *storage.CSIDriver from storage/cacher.go:/csidrivers
I0813 01:50:57.235010  114703 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.235342  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.235376  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.236060  114703 cacher.go:402] cacher (*storage.CSIDriver): initialized
I0813 01:50:57.236083  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.237678  114703 store.go:1378] Monitoring deployments.apps count at <storage-prefix>//deployments
I0813 01:50:57.239417  114703 reflector.go:243] Listing and watching *apps.Deployment from storage/cacher.go:/deployments
I0813 01:50:57.239649  114703 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.239913  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.239947  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.240635  114703 cacher.go:402] cacher (*apps.Deployment): initialized
I0813 01:50:57.240726  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.243587  114703 store.go:1378] Monitoring statefulsets.apps count at <storage-prefix>//statefulsets
I0813 01:50:57.243990  114703 reflector.go:243] Listing and watching *apps.StatefulSet from storage/cacher.go:/statefulsets
I0813 01:50:57.245024  114703 storage_factory.go:285] storing daemonsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.245934  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.245977  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.246076  114703 cacher.go:402] cacher (*apps.StatefulSet): initialized
I0813 01:50:57.246240  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.248581  114703 store.go:1378] Monitoring daemonsets.apps count at <storage-prefix>//daemonsets
I0813 01:50:57.250784  114703 reflector.go:243] Listing and watching *apps.DaemonSet from storage/cacher.go:/daemonsets
I0813 01:50:57.250939  114703 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.251190  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.251225  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.251702  114703 cacher.go:402] cacher (*apps.DaemonSet): initialized
I0813 01:50:57.251836  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.253814  114703 store.go:1378] Monitoring replicasets.apps count at <storage-prefix>//replicasets
I0813 01:50:57.254151  114703 storage_factory.go:285] storing controllerrevisions.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.254489  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.254708  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.255361  114703 reflector.go:243] Listing and watching *apps.ReplicaSet from storage/cacher.go:/replicasets
I0813 01:50:57.256597  114703 store.go:1378] Monitoring controllerrevisions.apps count at <storage-prefix>//controllerrevisions
I0813 01:50:57.257403  114703 master.go:539] Enabling API group "apps".
I0813 01:50:57.257721  114703 cacher.go:402] cacher (*apps.ReplicaSet): initialized
I0813 01:50:57.257743  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.257538  114703 reflector.go:243] Listing and watching *apps.ControllerRevision from storage/cacher.go:/controllerrevisions
I0813 01:50:57.257940  114703 storage_factory.go:285] storing validatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.258179  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.258213  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.258947  114703 cacher.go:402] cacher (*apps.ControllerRevision): initialized
I0813 01:50:57.258965  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.259124  114703 store.go:1378] Monitoring validatingwebhookconfigurations.admissionregistration.k8s.io count at <storage-prefix>//validatingwebhookconfigurations
I0813 01:50:57.259303  114703 storage_factory.go:285] storing mutatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.259442  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.259455  114703 reflector.go:243] Listing and watching *admissionregistration.ValidatingWebhookConfiguration from storage/cacher.go:/validatingwebhookconfigurations
I0813 01:50:57.259461  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.260477  114703 store.go:1378] Monitoring mutatingwebhookconfigurations.admissionregistration.k8s.io count at <storage-prefix>//mutatingwebhookconfigurations
I0813 01:50:57.260674  114703 storage_factory.go:285] storing validatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.261262  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.261285  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.261502  114703 reflector.go:243] Listing and watching *admissionregistration.MutatingWebhookConfiguration from storage/cacher.go:/mutatingwebhookconfigurations
I0813 01:50:57.263548  114703 cacher.go:402] cacher (*admissionregistration.ValidatingWebhookConfiguration): initialized
I0813 01:50:57.263560  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.267623  114703 cacher.go:402] cacher (*admissionregistration.MutatingWebhookConfiguration): initialized
I0813 01:50:57.267645  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.267801  114703 store.go:1378] Monitoring validatingwebhookconfigurations.admissionregistration.k8s.io count at <storage-prefix>//validatingwebhookconfigurations
I0813 01:50:57.268120  114703 reflector.go:243] Listing and watching *admissionregistration.ValidatingWebhookConfiguration from storage/cacher.go:/validatingwebhookconfigurations
I0813 01:50:57.268121  114703 storage_factory.go:285] storing mutatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.268286  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.268305  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.283531  114703 store.go:1378] Monitoring mutatingwebhookconfigurations.admissionregistration.k8s.io count at <storage-prefix>//mutatingwebhookconfigurations
I0813 01:50:57.283563  114703 master.go:539] Enabling API group "admissionregistration.k8s.io".
I0813 01:50:57.283648  114703 storage_factory.go:285] storing events in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.283982  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.284010  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.284268  114703 reflector.go:243] Listing and watching *admissionregistration.MutatingWebhookConfiguration from storage/cacher.go:/mutatingwebhookconfigurations
I0813 01:50:57.286929  114703 cacher.go:402] cacher (*admissionregistration.ValidatingWebhookConfiguration): initialized
I0813 01:50:57.286949  114703 watch_cache.go:521] Replace watchCache (rev: 9634) 
I0813 01:50:57.289440  114703 store.go:1378] Monitoring events count at <storage-prefix>//events
I0813 01:50:57.289516  114703 storage_factory.go:285] storing events in v1, reading as __internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.289875  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.289902  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.290119  114703 reflector.go:243] Listing and watching *core.Event from storage/cacher.go:/events
I0813 01:50:57.308376  114703 cacher.go:402] cacher (*core.Event): initialized
I0813 01:50:57.308413  114703 watch_cache.go:521] Replace watchCache (rev: 9635) 
I0813 01:50:57.308432  114703 reflector.go:243] Listing and watching *core.Event from storage/cacher.go:/events
I0813 01:50:57.308396  114703 store.go:1378] Monitoring events count at <storage-prefix>//events
I0813 01:50:57.308512  114703 master.go:539] Enabling API group "events.k8s.io".
I0813 01:50:57.316529  114703 cacher.go:402] cacher (*admissionregistration.MutatingWebhookConfiguration): initialized
I0813 01:50:57.316557  114703 watch_cache.go:521] Replace watchCache (rev: 9635) 
I0813 01:50:57.321158  114703 cacher.go:402] cacher (*core.Event): initialized
I0813 01:50:57.321178  114703 watch_cache.go:521] Replace watchCache (rev: 9636) 
I0813 01:50:57.683721  114703 storage_factory.go:285] storing tokenreviews.authentication.k8s.io in authentication.k8s.io/v1, reading as authentication.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.688817  114703 storage_factory.go:285] storing tokenreviews.authentication.k8s.io in authentication.k8s.io/v1, reading as authentication.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.694513  114703 storage_factory.go:285] storing localsubjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.699788  114703 storage_factory.go:285] storing selfsubjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.706518  114703 storage_factory.go:285] storing selfsubjectrulesreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.714141  114703 storage_factory.go:285] storing subjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.727281  114703 storage_factory.go:285] storing localsubjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.734351  114703 storage_factory.go:285] storing selfsubjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.739268  114703 storage_factory.go:285] storing selfsubjectrulesreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.746599  114703 storage_factory.go:285] storing subjectaccessreviews.authorization.k8s.io in authorization.k8s.io/v1, reading as authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.752718  114703 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.761225  114703 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.767757  114703 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.775860  114703 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.783365  114703 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.790897  114703 storage_factory.go:285] storing horizontalpodautoscalers.autoscaling in autoscaling/v1, reading as autoscaling/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.798283  114703 storage_factory.go:285] storing jobs.batch in batch/v1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.806357  114703 storage_factory.go:285] storing jobs.batch in batch/v1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.820274  114703 storage_factory.go:285] storing cronjobs.batch in batch/v1beta1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.827225  114703 storage_factory.go:285] storing cronjobs.batch in batch/v1beta1, reading as batch/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
W0813 01:50:57.827355  114703 genericapiserver.go:412] Skipping API batch/v2alpha1 because it has no resources.
I0813 01:50:57.835395  114703 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.841140  114703 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.847308  114703 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.855702  114703 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.863852  114703 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.868395  114703 client.go:360] parsed scheme: "endpoint"
I0813 01:50:57.868472  114703 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:50:57.872112  114703 storage_factory.go:285] storing certificatesigningrequests.certificates.k8s.io in certificates.k8s.io/v1beta1, reading as certificates.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.879998  114703 storage_factory.go:285] storing leases.coordination.k8s.io in coordination.k8s.io/v1beta1, reading as coordination.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.886973  114703 storage_factory.go:285] storing leases.coordination.k8s.io in coordination.k8s.io/v1beta1, reading as coordination.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.894961  114703 storage_factory.go:285] storing endpointslices.discovery.k8s.io in discovery.k8s.io/v1beta1, reading as discovery.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
W0813 01:50:57.895067  114703 genericapiserver.go:412] Skipping API discovery.k8s.io/v1alpha1 because it has no resources.
I0813 01:50:57.900947  114703 storage_factory.go:285] storing ingresses.extensions in networking.k8s.io/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.907166  114703 storage_factory.go:285] storing ingresses.extensions in networking.k8s.io/v1beta1, reading as extensions/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.913694  114703 storage_factory.go:285] storing ingressclasses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.920321  114703 storage_factory.go:285] storing ingresses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.924889  114703 storage_factory.go:285] storing ingresses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.932352  114703 storage_factory.go:285] storing networkpolicies.networking.k8s.io in networking.k8s.io/v1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.937744  114703 storage_factory.go:285] storing ingressclasses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.944569  114703 storage_factory.go:285] storing ingresses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.949243  114703 storage_factory.go:285] storing ingresses.networking.k8s.io in networking.k8s.io/v1beta1, reading as networking.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.955995  114703 storage_factory.go:285] storing runtimeclasses.node.k8s.io in node.k8s.io/v1beta1, reading as node.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
W0813 01:50:57.956096  114703 genericapiserver.go:412] Skipping API node.k8s.io/v1alpha1 because it has no resources.
I0813 01:50:57.961421  114703 storage_factory.go:285] storing poddisruptionbudgets.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.968155  114703 storage_factory.go:285] storing poddisruptionbudgets.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.973998  114703 storage_factory.go:285] storing podsecuritypolicies.policy in policy/v1beta1, reading as policy/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.981045  114703 storage_factory.go:285] storing clusterrolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.987484  114703 storage_factory.go:285] storing clusterroles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:57.994543  114703 storage_factory.go:285] storing rolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.000063  114703 storage_factory.go:285] storing roles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.007398  114703 storage_factory.go:285] storing clusterrolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.012687  114703 storage_factory.go:285] storing clusterroles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.020024  114703 storage_factory.go:285] storing rolebindings.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.027201  114703 storage_factory.go:285] storing roles.rbac.authorization.k8s.io in rbac.authorization.k8s.io/v1, reading as rbac.authorization.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
W0813 01:50:58.027399  114703 genericapiserver.go:412] Skipping API rbac.authorization.k8s.io/v1alpha1 because it has no resources.
I0813 01:50:58.032526  114703 storage_factory.go:285] storing priorityclasses.scheduling.k8s.io in scheduling.k8s.io/v1, reading as scheduling.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.039891  114703 storage_factory.go:285] storing priorityclasses.scheduling.k8s.io in scheduling.k8s.io/v1, reading as scheduling.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
W0813 01:50:58.040186  114703 genericapiserver.go:412] Skipping API scheduling.k8s.io/v1alpha1 because it has no resources.
I0813 01:50:58.046090  114703 storage_factory.go:285] storing csidrivers.storage.k8s.io in storage.k8s.io/v1beta1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.052409  114703 storage_factory.go:285] storing csinodes.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.058392  114703 storage_factory.go:285] storing storageclasses.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.065788  114703 storage_factory.go:285] storing volumeattachments.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.070488  114703 storage_factory.go:285] storing volumeattachments.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.080684  114703 storage_factory.go:285] storing csidrivers.storage.k8s.io in storage.k8s.io/v1beta1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.086366  114703 storage_factory.go:285] storing csinodes.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.093888  114703 storage_factory.go:285] storing storageclasses.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.099111  114703 storage_factory.go:285] storing volumeattachments.storage.k8s.io in storage.k8s.io/v1, reading as storage.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
W0813 01:50:58.099416  114703 genericapiserver.go:412] Skipping API storage.k8s.io/v1alpha1 because it has no resources.
I0813 01:50:58.106504  114703 storage_factory.go:285] storing controllerrevisions.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.112528  114703 storage_factory.go:285] storing daemonsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.119144  114703 storage_factory.go:285] storing daemonsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.125453  114703 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.132094  114703 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.137192  114703 storage_factory.go:285] storing deployments.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.143699  114703 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.148664  114703 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.154716  114703 storage_factory.go:285] storing replicasets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.160101  114703 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.165781  114703 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.171977  114703 storage_factory.go:285] storing statefulsets.apps in apps/v1, reading as apps/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
W0813 01:50:58.172092  114703 genericapiserver.go:412] Skipping API apps/v1beta2 because it has no resources.
W0813 01:50:58.172102  114703 genericapiserver.go:412] Skipping API apps/v1beta1 because it has no resources.
I0813 01:50:58.178710  114703 storage_factory.go:285] storing mutatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.184315  114703 storage_factory.go:285] storing validatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.191070  114703 storage_factory.go:285] storing mutatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.196542  114703 storage_factory.go:285] storing validatingwebhookconfigurations.admissionregistration.k8s.io in admissionregistration.k8s.io/v1beta1, reading as admissionregistration.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.203152  114703 storage_factory.go:285] storing events.events.k8s.io in v1, reading as events.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:50:58.209744  114703 storage_factory.go:285] storing events.events.k8s.io in v1, reading as events.k8s.io/__internal from storagebackend.Config{Type:"", Prefix:"/6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry", Transport:storagebackend.TransportConfig{ServerList:[]string{"http://127.0.0.1:2379"}, KeyFile:"", CertFile:"", TrustedCAFile:"", EgressLookup:(egressselector.Lookup)(nil)}, Paging:true, Codec:runtime.Codec(nil), EncodeVersioner:runtime.GroupVersioner(nil), Transformer:value.Transformer(nil), CompactionInterval:300000000000, CountMetricPollPeriod:60000000000, DBMetricPollInterval:30000000000}
I0813 01:51:17.455898  114703 dynamic_cafile_content.go:167] Starting request-header::/tmp/test-integration-TestDynamicClientBuilder179258641/proxy-ca.crt909360956
I0813 01:51:17.455916  114703 dynamic_cafile_content.go:167] Starting client-ca-bundle::/tmp/test-integration-TestDynamicClientBuilder179258641/client-ca.crt209897323
I0813 01:51:17.456472  114703 tlsconfig.go:178] loaded client CA [0/"client-ca-bundle::/tmp/test-integration-TestDynamicClientBuilder179258641/client-ca.crt209897323,request-header::/tmp/test-integration-TestDynamicClientBuilder179258641/proxy-ca.crt909360956"]: "client-ca" [] issuer="<self>" (2020-08-13 01:50:54 +0000 UTC to 2030-08-11 01:50:54 +0000 UTC (now=2020-08-13 01:51:17.456440086 +0000 UTC))
I0813 01:51:17.456524  114703 tlsconfig.go:178] loaded client CA [1/"client-ca-bundle::/tmp/test-integration-TestDynamicClientBuilder179258641/client-ca.crt209897323,request-header::/tmp/test-integration-TestDynamicClientBuilder179258641/proxy-ca.crt909360956"]: "front-proxy-ca" [] issuer="<self>" (2020-08-13 01:50:53 +0000 UTC to 2030-08-11 01:50:53 +0000 UTC (now=2020-08-13 01:51:17.456507202 +0000 UTC))
I0813 01:51:17.456873  114703 tlsconfig.go:200] loaded serving cert ["serving-cert::/tmp/test-integration-TestDynamicClientBuilder179258641/apiserver.crt::/tmp/test-integration-TestDynamicClientBuilder179258641/apiserver.key"]: "10.60.65.31@1597283455" [serving] validServingFor=[10.60.65.31,10.0.0.1,127.0.0.1,kubernetes.default.svc,kubernetes.default,kubernetes] issuer="10.60.65.31-ca@1597283454" (2020-08-13 00:50:54 +0000 UTC to 2021-08-13 00:50:54 +0000 UTC (now=2020-08-13 01:51:17.456859353 +0000 UTC))
I0813 01:51:17.457259  114703 dynamic_serving_content.go:130] Starting serving-cert::/tmp/test-integration-TestDynamicClientBuilder179258641/apiserver.crt::/tmp/test-integration-TestDynamicClientBuilder179258641/apiserver.key
I0813 01:51:17.457545  114703 named_certificates.go:53] loaded SNI cert [0/"self-signed loopback"]: "apiserver-loopback-client@1597283456" [serving] validServingFor=[apiserver-loopback-client] issuer="apiserver-loopback-client-ca@1597283456" (2020-08-13 00:50:55 +0000 UTC to 2021-08-13 00:50:55 +0000 UTC (now=2020-08-13 01:51:17.457527697 +0000 UTC))
I0813 01:51:17.457582  114703 secure_serving.go:197] Serving securely on 127.0.0.1:42895
I0813 01:51:17.457830  114703 tlsconfig.go:240] Starting DynamicServingCertificateController
E0813 01:51:17.459593  114703 controller.go:152] Unable to remove old endpoints from kubernetes service: StorageError: key not found, Code: 1, Key: /6afd41d2-eb86-4749-8489-cbb5dcd16d04/registry/masterleases/10.60.65.31, ResourceVersion: 0, AdditionalErrorMsg: 
I0813 01:51:17.464315  114703 reflector.go:207] Starting reflector *v1.LimitRange (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.464339  114703 reflector.go:243] Listing and watching *v1.LimitRange from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.464964  114703 reflector.go:207] Starting reflector *v1.RoleBinding (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.464997  114703 reflector.go:243] Listing and watching *v1.RoleBinding from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.465630  114703 reflector.go:207] Starting reflector *v1.Role (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.465660  114703 reflector.go:243] Listing and watching *v1.Role from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.466001  114703 reflector.go:207] Starting reflector *v1.PersistentVolume (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.466017  114703 reflector.go:243] Listing and watching *v1.PersistentVolume from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.466704  114703 reflector.go:207] Starting reflector *v1.StorageClass (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.466720  114703 reflector.go:243] Listing and watching *v1.StorageClass from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.467111  114703 reflector.go:207] Starting reflector *v1.ValidatingWebhookConfiguration (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.467124  114703 reflector.go:243] Listing and watching *v1.ValidatingWebhookConfiguration from k8s.io/client-go/informers/factory.go:134
W0813 01:51:17.468920  114703 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
I0813 01:51:17.469166  114703 dynamic_cafile_content.go:167] Starting client-ca-bundle::/tmp/test-integration-TestDynamicClientBuilder179258641/client-ca.crt209897323
I0813 01:51:17.469262  114703 dynamic_cafile_content.go:167] Starting request-header::/tmp/test-integration-TestDynamicClientBuilder179258641/proxy-ca.crt909360956
I0813 01:51:17.469334  114703 cluster_authentication_trust_controller.go:440] Starting cluster_authentication_trust_controller controller
I0813 01:51:17.469343  114703 shared_informer.go:240] Waiting for caches to sync for cluster_authentication_trust_controller
I0813 01:51:17.469423  114703 reflector.go:207] Starting reflector *v1.ResourceQuota (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.469441  114703 reflector.go:243] Listing and watching *v1.ResourceQuota from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.469735  114703 reflector.go:207] Starting reflector *v1.ConfigMap (12h0m0s) from k8s.io/kubernetes/pkg/master/controller/clusterauthenticationtrust/cluster_authentication_trust_controller.go:444
I0813 01:51:17.469749  114703 reflector.go:243] Listing and watching *v1.ConfigMap from k8s.io/kubernetes/pkg/master/controller/clusterauthenticationtrust/cluster_authentication_trust_controller.go:444
I0813 01:51:17.470107  114703 reflector.go:207] Starting reflector *v1.Node (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.470146  114703 reflector.go:243] Listing and watching *v1.Node from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.471329  114703 reflector.go:207] Starting reflector *v1.ClusterRole (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.471349  114703 reflector.go:243] Listing and watching *v1.ClusterRole from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.471535  114703 reflector.go:207] Starting reflector *v1.ClusterRoleBinding (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.471551  114703 reflector.go:243] Listing and watching *v1.ClusterRoleBinding from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.471990  114703 reflector.go:207] Starting reflector *v1.Namespace (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.472005  114703 reflector.go:243] Listing and watching *v1.Namespace from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.472437  114703 reflector.go:207] Starting reflector *v1.MutatingWebhookConfiguration (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.472537  114703 reflector.go:243] Listing and watching *v1.MutatingWebhookConfiguration from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.472596  114703 reflector.go:207] Starting reflector *v1.PriorityClass (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.472622  114703 reflector.go:243] Listing and watching *v1.PriorityClass from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.472969  114703 reflector.go:207] Starting reflector *v1.Pod (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.473054  114703 reflector.go:243] Listing and watching *v1.Pod from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.473157  114703 reflector.go:207] Starting reflector *v1beta1.RuntimeClass (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.473183  114703 reflector.go:243] Listing and watching *v1beta1.RuntimeClass from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.473575  114703 reflector.go:207] Starting reflector *v1.IngressClass (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.473601  114703 reflector.go:243] Listing and watching *v1.IngressClass from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.473985  114703 reflector.go:207] Starting reflector *v1.Secret (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.474008  114703 reflector.go:243] Listing and watching *v1.Secret from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.476649  114703 reflector.go:207] Starting reflector *v1.VolumeAttachment (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.476674  114703 reflector.go:243] Listing and watching *v1.VolumeAttachment from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.477172  114703 reflector.go:207] Starting reflector *v1.Service (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.477215  114703 reflector.go:243] Listing and watching *v1.Service from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.478536  114703 reflector.go:207] Starting reflector *v1.ServiceAccount (10m0s) from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.480207  114703 reflector.go:243] Listing and watching *v1.ServiceAccount from k8s.io/client-go/informers/factory.go:134
I0813 01:51:17.481826  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/secrets?limit=500&resourceVersion=0" latency="1.036007ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.482677  114703 httplog.go:89] "HTTP" verb="GET" URI="/apis/rbac.authorization.k8s.io/v1/rolebindings?limit=500&resourceVersion=0" latency="542.045µs" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.483286  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/limitranges?limit=500&resourceVersion=0" latency="372.383µs" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.483807  114703 httplog.go:89] "HTTP" verb="GET" URI="/apis/rbac.authorization.k8s.io/v1/roles?limit=500&resourceVersion=0" latency="289.373µs" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.484289  114703 httplog.go:89] "HTTP" verb="GET" URI="/apis/rbac.authorization.k8s.io/v1/clusterroles?limit=500&resourceVersion=0" latency="414.109µs" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.484510  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/persistentvolumes?limit=500&resourceVersion=0" latency="483.43µs" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.485835  114703 httplog.go:89] "HTTP" verb="GET" URI="/apis/admissionregistration.k8s.io/v1/validatingwebhookconfigurations?limit=500&resourceVersion=0" latency="275.359µs" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.486444  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/resourcequotas?limit=500&resourceVersion=0" latency="349.764µs" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.487017  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/namespaces/kube-system/configmaps?limit=500&resourceVersion=0" latency="332.981µs" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.487539  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/services" latency="2.78445ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.488015  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/nodes?limit=500&resourceVersion=0" latency="496.639µs" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.488495  114703 httplog.go:89] "HTTP" verb="GET" URI="/apis/rbac.authorization.k8s.io/v1/clusterrolebindings?limit=500&resourceVersion=0" latency="316.109µs" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.489123  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/namespaces?limit=500&resourceVersion=0" latency="340.011µs" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.489643  114703 httplog.go:89] "HTTP" verb="GET" URI="/apis/scheduling.k8s.io/v1/priorityclasses?limit=500&resourceVersion=0" latency="285.897µs" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.490211  114703 httplog.go:89] "HTTP" verb="GET" URI="/apis/storage.k8s.io/v1/volumeattachments?limit=500&resourceVersion=0" latency="323.913µs" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.498399  114703 httplog.go:89] "HTTP" verb="GET" URI="/apis/networking.k8s.io/v1/ingressclasses?limit=500&resourceVersion=0" latency="483.046µs" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.499354  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/pods?limit=500&resourceVersion=0" latency="8.868995ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.499594  114703 httplog.go:89] "HTTP" verb="GET" URI="/apis/node.k8s.io/v1beta1/runtimeclasses?limit=500&resourceVersion=0" latency="6.932154ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
W0813 01:51:17.500430  114703 warnings.go:67] node.k8s.io/v1beta1 RuntimeClass is deprecated in v1.22+, unavailable in v1.25+
I0813 01:51:17.500853  114703 httplog.go:89] "HTTP" verb="GET" URI="/apis/storage.k8s.io/v1/storageclasses?limit=500&resourceVersion=0" latency="15.609821ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.501695  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/services?limit=500&resourceVersion=0" latency="3.494899ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.503383  114703 get.go:259] "Starting watch" path="/apis/networking.k8s.io/v1/ingressclasses" resourceVersion="9634" labels="" fields="" timeout="7m45s"
I0813 01:51:17.503981  114703 get.go:259] "Starting watch" path="/apis/admissionregistration.k8s.io/v1/validatingwebhookconfigurations" resourceVersion="9634" labels="" fields="" timeout="5m3s"
I0813 01:51:17.516314  114703 healthz.go:243] healthz check failed: poststarthook/bootstrap-controller,poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/bootstrap-controller failed: not finished
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:17.516489  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz?timeout=32s" latency="47.059846ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50140" resp=0
I0813 01:51:17.517096  114703 get.go:259] "Starting watch" path="/api/v1/secrets" resourceVersion="9634" labels="" fields="" timeout="8m24s"
I0813 01:51:17.500571  114703 get.go:259] "Starting watch" path="/api/v1/nodes" resourceVersion="9634" labels="" fields="" timeout="8m50s"
I0813 01:51:17.522903  114703 get.go:259] "Starting watch" path="/apis/rbac.authorization.k8s.io/v1/rolebindings" resourceVersion="9634" labels="" fields="" timeout="7m12s"
I0813 01:51:17.523296  114703 get.go:259] "Starting watch" path="/api/v1/limitranges" resourceVersion="9634" labels="" fields="" timeout="8m28s"
I0813 01:51:17.523621  114703 get.go:259] "Starting watch" path="/api/v1/services" resourceVersion="9634" labels="" fields="" timeout="9m16s"
I0813 01:51:17.523008  114703 get.go:259] "Starting watch" path="/apis/storage.k8s.io/v1/storageclasses" resourceVersion="9634" labels="" fields="" timeout="8m58s"
I0813 01:51:17.524958  114703 get.go:259] "Starting watch" path="/apis/rbac.authorization.k8s.io/v1/roles" resourceVersion="9634" labels="" fields="" timeout="8m43s"
I0813 01:51:17.525248  114703 get.go:259] "Starting watch" path="/api/v1/persistentvolumes" resourceVersion="9634" labels="" fields="" timeout="8m41s"
I0813 01:51:17.525537  114703 httplog.go:89] "HTTP" verb="GET" URI="/apis/admissionregistration.k8s.io/v1/mutatingwebhookconfigurations?limit=500&resourceVersion=0" latency="25.300923ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.525870  114703 get.go:259] "Starting watch" path="/api/v1/resourcequotas" resourceVersion="9634" labels="" fields="" timeout="8m41s"
I0813 01:51:17.525962  114703 get.go:259] "Starting watch" path="/api/v1/namespaces/kube-system/configmaps" resourceVersion="9634" labels="" fields="" timeout="9m44s"
I0813 01:51:17.527136  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/serviceaccounts?limit=500&resourceVersion=0" latency="43.374258ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.541501  114703 get.go:259] "Starting watch" path="/apis/rbac.authorization.k8s.io/v1/clusterrolebindings" resourceVersion="9634" labels="" fields="" timeout="7m14s"
I0813 01:51:17.527468  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/services" latency="1.848619ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.555672  114703 get.go:259] "Starting watch" path="/apis/node.k8s.io/v1beta1/runtimeclasses" resourceVersion="9634" labels="" fields="" timeout="7m32s"
I0813 01:51:17.555672  114703 get.go:259] "Starting watch" path="/apis/scheduling.k8s.io/v1/priorityclasses" resourceVersion="9634" labels="" fields="" timeout="8m10s"
I0813 01:51:17.556031  114703 get.go:259] "Starting watch" path="/api/v1/pods" resourceVersion="9634" labels="" fields="" timeout="6m29s"
I0813 01:51:17.556093  114703 get.go:259] "Starting watch" path="/apis/rbac.authorization.k8s.io/v1/clusterroles" resourceVersion="9634" labels="" fields="" timeout="8m42s"
I0813 01:51:17.556420  114703 get.go:259] "Starting watch" path="/apis/storage.k8s.io/v1/volumeattachments" resourceVersion="9634" labels="" fields="" timeout="9m25s"
I0813 01:51:17.542238  114703 get.go:259] "Starting watch" path="/api/v1/namespaces" resourceVersion="9634" labels="" fields="" timeout="5m11s"
W0813 01:51:17.557052  114703 warnings.go:67] node.k8s.io/v1beta1 RuntimeClass is deprecated in v1.22+, unavailable in v1.25+
I0813 01:51:17.557588  114703 get.go:259] "Starting watch" path="/api/v1/serviceaccounts" resourceVersion="9634" labels="" fields="" timeout="7m19s"
I0813 01:51:17.558449  114703 get.go:259] "Starting watch" path="/apis/admissionregistration.k8s.io/v1/mutatingwebhookconfigurations" resourceVersion="9635" labels="" fields="" timeout="6m19s"
I0813 01:51:17.568074  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/namespaces/kube-system" latency="2.973817ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=404
I0813 01:51:17.568329  114703 healthz.go:243] healthz check failed: poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:17.568408  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz" latency="3.607681ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=0
I0813 01:51:17.568473  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/services" latency="1.645463ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.569504  114703 shared_informer.go:270] caches populated
I0813 01:51:17.569517  114703 shared_informer.go:247] Caches are synced for cluster_authentication_trust_controller 
I0813 01:51:17.572401  114703 cluster_authentication_trust_controller.go:165] writing updated authentication info to  kube-system configmaps/extension-apiserver-authentication
I0813 01:51:17.583351  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/namespaces/kube-system" latency="1.002606ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=404
I0813 01:51:17.586948  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/services" latency="5.128584ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.596523  114703 httplog.go:89] "HTTP" verb="POST" URI="/api/v1/namespaces" latency="12.444066ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=201
I0813 01:51:17.602337  114703 httplog.go:89] "HTTP" verb="PUT" URI="/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication" latency="1.473587ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=404
I0813 01:51:17.607851  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/namespaces/kube-system/resourcequotas" latency="1.523386ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.610684  114703 httplog.go:89] "HTTP" verb="POST" URI="/api/v1/namespaces/kube-system/configmaps" latency="6.212598ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=201
I0813 01:51:17.615069  114703 httplog.go:89] "HTTP" verb="POST" URI="/api/v1/namespaces" latency="36.631176ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=409
I0813 01:51:17.616569  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/namespaces/kube-public" latency="1.034683ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=404
I0813 01:51:17.619554  114703 httplog.go:89] "HTTP" verb="POST" URI="/api/v1/namespaces" latency="2.024788ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=201
I0813 01:51:17.621502  114703 healthz.go:243] healthz check failed: poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:17.621587  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/namespaces/kube-node-lease" latency="1.607712ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=404
I0813 01:51:17.621622  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz?timeout=32s" latency="4.703558ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50140" resp=0
I0813 01:51:17.624250  114703 httplog.go:89] "HTTP" verb="POST" URI="/api/v1/namespaces" latency="2.031891ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=201
I0813 01:51:17.679812  114703 healthz.go:243] healthz check failed: poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:17.679959  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz" latency="1.532895ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=0
I0813 01:51:17.718153  114703 healthz.go:243] healthz check failed: poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:17.718253  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz?timeout=32s" latency="1.25077ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50140" resp=0
I0813 01:51:17.779286  114703 healthz.go:243] healthz check failed: poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:17.779400  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz" latency="1.345217ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=0
I0813 01:51:17.819700  114703 healthz.go:243] healthz check failed: poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:17.819812  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz?timeout=32s" latency="1.304475ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50140" resp=0
I0813 01:51:17.880202  114703 healthz.go:243] healthz check failed: poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:17.880314  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz" latency="1.647494ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=0
I0813 01:51:17.919606  114703 healthz.go:243] healthz check failed: poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:17.919704  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz?timeout=32s" latency="1.13744ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50140" resp=0
I0813 01:51:17.983073  114703 healthz.go:243] healthz check failed: poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:17.983210  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz" latency="4.43478ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=0
I0813 01:51:18.022647  114703 healthz.go:243] healthz check failed: poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:18.022783  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz?timeout=32s" latency="1.314762ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50140" resp=0
I0813 01:51:18.079349  114703 healthz.go:243] healthz check failed: poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:18.079478  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz" latency="1.438452ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=0
I0813 01:51:18.118988  114703 healthz.go:243] healthz check failed: poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:18.119095  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz?timeout=32s" latency="1.246858ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50140" resp=0
I0813 01:51:18.179469  114703 healthz.go:243] healthz check failed: poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:18.179581  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz" latency="1.404495ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=0
I0813 01:51:18.221383  114703 healthz.go:243] healthz check failed: poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:18.221539  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz?timeout=32s" latency="1.906458ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50140" resp=0
I0813 01:51:18.225746  114703 healthz.go:243] healthz check failed: poststarthook/rbac/bootstrap-roles,poststarthook/scheduling/bootstrap-system-priority-classes
[-]poststarthook/rbac/bootstrap-roles failed: not finished
[-]poststarthook/scheduling/bootstrap-system-priority-classes failed: not finished
I0813 01:51:18.225859  114703 httplog.go:89] "HTTP" verb="GET" URI="/healthz?timeout=32s" latency="1.774839ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50140" resp=0
    test_server.go:169: timed out waiting for the condition
I0813 01:51:18.226481  114703 controller.go:181] Shutting down kubernetes service endpoint reconciler
I0813 01:51:18.226650  114703 dynamic_cafile_content.go:182] Shutting down request-header::/tmp/test-integration-TestDynamicClientBuilder179258641/proxy-ca.crt909360956
I0813 01:51:18.226652  114703 feature_gate.go:243] feature gates: &{map[TokenRequest:true]}
--- FAIL: TestDynamicClientBuilder (25.15s)

				from junit_20200813-014636.xml

Filter through log files | View test history on testgrid


Show 2930 Passed Tests

Show 25 Skipped Tests

Error lines from build-log.txt

... skipping 61 lines ...
Recording: record_command_canary
Running command: record_command_canary

+++ Running case: test-cmd.record_command_canary 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: record_command_canary
/home/prow/go/src/k8s.io/kubernetes/test/cmd/legacy-script.sh: line 155: bogus-expected-to-fail: command not found
!!! [0813 01:30:32] Call tree:
!!! [0813 01:30:32]  1: /home/prow/go/src/k8s.io/kubernetes/test/cmd/../../third_party/forked/shell2junit/sh2ju.sh:47 record_command_canary(...)
!!! [0813 01:30:32]  2: /home/prow/go/src/k8s.io/kubernetes/test/cmd/../../third_party/forked/shell2junit/sh2ju.sh:112 eVal(...)
!!! [0813 01:30:32]  3: /home/prow/go/src/k8s.io/kubernetes/test/cmd/legacy-script.sh:131 juLog(...)
!!! [0813 01:30:32]  4: /home/prow/go/src/k8s.io/kubernetes/test/cmd/legacy-script.sh:159 record_command(...)
!!! [0813 01:30:32]  5: hack/make-rules/test-cmd.sh:35 source(...)
+++ exit code: 1
+++ error: 1
+++ [0813 01:30:32] Running kubeadm tests
+++ [0813 01:30:41] Building go targets for linux/amd64:
    cmd/kubeadm
+++ [0813 01:31:36] Running tests without code coverage
{"Time":"2020-08-13T01:33:22.836370918Z","Action":"output","Package":"k8s.io/kubernetes/cmd/kubeadm/test/cmd","Output":"ok  \tk8s.io/kubernetes/cmd/kubeadm/test/cmd\t63.417s\n"}
✓  cmd/kubeadm/test/cmd (1m3.424s)
... skipping 323 lines ...
I0813 01:36:02.659090   54230 client.go:360] parsed scheme: "passthrough"
I0813 01:36:02.659148   54230 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{http://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
I0813 01:36:02.659159   54230 clientconn.go:948] ClientConn switching balancer to "pick_first"
+++ [0813 01:36:14] Starting controller-manager
Flag --port has been deprecated, see --secure-port instead.
I0813 01:36:15.332892   57736 serving.go:331] Generated self-signed cert in-memory
W0813 01:36:15.923828   57736 authentication.go:368] failed to read in-cluster kubeconfig for delegated authentication: open /var/run/secrets/kubernetes.io/serviceaccount/token: no such file or directory
W0813 01:36:15.923889   57736 authentication.go:265] No authentication-kubeconfig provided in order to lookup client-ca-file in configmap/extension-apiserver-authentication in kube-system, so client certificate authentication won't work.
W0813 01:36:15.923895   57736 authentication.go:289] No authentication-kubeconfig provided in order to lookup requestheader-client-ca-file in configmap/extension-apiserver-authentication in kube-system, so request-header client certificate authentication won't work.
W0813 01:36:15.923915   57736 authorization.go:177] failed to read in-cluster kubeconfig for delegated authorization: open /var/run/secrets/kubernetes.io/serviceaccount/token: no such file or directory
W0813 01:36:15.923929   57736 authorization.go:146] No authorization-kubeconfig provided, so SubjectAccessReview of authorization tokens won't work.
I0813 01:36:15.923948   57736 controllermanager.go:175] Version: v1.20.0-alpha.0.586+679b31b2f32655
I0813 01:36:15.928291   57736 secure_serving.go:197] Serving securely on [::]:10257
I0813 01:36:15.928343   57736 tlsconfig.go:240] Starting DynamicServingCertificateController
I0813 01:36:15.929130   57736 deprecated_insecure_serving.go:53] Serving insecurely on [::]:10252
I0813 01:36:15.929265   57736 leaderelection.go:243] attempting to acquire leader lease  kube-system/kube-controller-manager...
... skipping 11 lines ...
W0813 01:36:16.345908   57736 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W0813 01:36:16.345947   57736 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
I0813 01:36:16.345961   57736 controllermanager.go:549] Started "horizontalpodautoscaling"
I0813 01:36:16.346076   57736 horizontal.go:169] Starting HPA controller
I0813 01:36:16.346118   57736 shared_informer.go:240] Waiting for caches to sync for HPA
W0813 01:36:16.346314   57736 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
E0813 01:36:16.346353   57736 core.go:90] Failed to start service controller: WARNING: no cloud provider provided, services of type LoadBalancer will fail
W0813 01:36:16.346362   57736 controllermanager.go:541] Skipping "service"
I0813 01:36:16.346372   57736 core.go:240] Will not configure cloud provider routes for allocate-node-cidrs: false, configure-cloud-routes: true.
W0813 01:36:16.346379   57736 controllermanager.go:541] Skipping "route"
W0813 01:36:16.346665   57736 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
node/127.0.0.1 created
+++ [0813 01:36:16] Checking kubectl version
... skipping 100 lines ...
I0813 01:36:16.722017   57736 controllermanager.go:549] Started "statefulset"
I0813 01:36:16.722070   57736 endpoints_controller.go:184] Starting endpoint controller
I0813 01:36:16.722229   57736 shared_informer.go:240] Waiting for caches to sync for endpoint
I0813 01:36:16.722074   57736 stateful_set.go:146] Starting stateful set controller
I0813 01:36:16.722250   57736 shared_informer.go:240] Waiting for caches to sync for stateful set
I0813 01:36:16.722574   57736 node_lifecycle_controller.go:77] Sending events to api server
E0813 01:36:16.722693   57736 core.go:230] failed to start cloud node lifecycle controller: no cloud provider provided
W0813 01:36:16.722711   57736 controllermanager.go:541] Skipping "cloud-node-lifecycle"
I0813 01:36:16.725042   57736 controllermanager.go:549] Started "persistentvolume-binder"
W0813 01:36:16.725430   57736 controllermanager.go:541] Skipping "root-ca-cert-publisher"
I0813 01:36:16.725949   57736 controllermanager.go:549] Started "endpointslice"
I0813 01:36:16.727045   57736 pv_controller_base.go:303] Starting persistent volume controller
I0813 01:36:16.727063   57736 shared_informer.go:240] Waiting for caches to sync for persistent volume
... skipping 24 lines ...
I0813 01:36:16.737431   57736 controllermanager.go:549] Started "replicationcontroller"
I0813 01:36:16.737604   57736 replica_set.go:182] Starting replicationcontroller controller
I0813 01:36:16.737623   57736 shared_informer.go:240] Waiting for caches to sync for ReplicationController
I0813 01:36:16.737826   57736 controllermanager.go:549] Started "csrapproving"
I0813 01:36:16.737877   57736 certificate_controller.go:118] Starting certificate controller "csrapproving"
I0813 01:36:16.737895   57736 shared_informer.go:240] Waiting for caches to sync for certificate-csrapproving
W0813 01:36:16.793769   57736 actual_state_of_world.go:506] Failed to update statusUpdateNeeded field in actual state of world: Failed to set statusUpdateNeeded to needed true, because nodeName="127.0.0.1" does not exist
I0813 01:36:16.803460   57736 shared_informer.go:247] Caches are synced for ClusterRoleAggregator 
E0813 01:36:16.816066   57736 clusterroleaggregation_controller.go:181] edit failed with : Operation cannot be fulfilled on clusterroles.rbac.authorization.k8s.io "edit": the object has been modified; please apply your changes to the latest version and try again
I0813 01:36:16.819051   57736 shared_informer.go:247] Caches are synced for namespace 
I0813 01:36:16.819608   57736 shared_informer.go:247] Caches are synced for service account 
E0813 01:36:16.819701   57736 clusterroleaggregation_controller.go:181] view failed with : Operation cannot be fulfilled on clusterroles.rbac.authorization.k8s.io "view": the object has been modified; please apply your changes to the latest version and try again
E0813 01:36:16.819775   57736 clusterroleaggregation_controller.go:181] admin failed with : Operation cannot be fulfilled on clusterroles.rbac.authorization.k8s.io "admin": the object has been modified; please apply your changes to the latest version and try again
I0813 01:36:16.820364   57736 shared_informer.go:247] Caches are synced for PV protection 
I0813 01:36:16.821393   57736 shared_informer.go:247] Caches are synced for GC 
I0813 01:36:16.822211   54230 controller.go:606] quota admission added evaluator for: serviceaccounts
I0813 01:36:16.825156   57736 shared_informer.go:247] Caches are synced for endpoint 
I0813 01:36:16.827606   57736 shared_informer.go:247] Caches are synced for endpoint_slice 
I0813 01:36:16.831282   57736 shared_informer.go:247] Caches are synced for endpoint_slice_mirroring 
E0813 01:36:16.833945   57736 clusterroleaggregation_controller.go:181] edit failed with : Operation cannot be fulfilled on clusterroles.rbac.authorization.k8s.io "edit": the object has been modified; please apply your changes to the latest version and try again
I0813 01:36:16.834122   57736 shared_informer.go:247] Caches are synced for job 
I0813 01:36:16.837762   57736 shared_informer.go:247] Caches are synced for ReplicationController 
I0813 01:36:16.842236   57736 shared_informer.go:247] Caches are synced for TTL 
E0813 01:36:16.843233   57736 clusterroleaggregation_controller.go:181] admin failed with : Operation cannot be fulfilled on clusterroles.rbac.authorization.k8s.io "admin": the object has been modified; please apply your changes to the latest version and try again
I0813 01:36:16.846572   57736 shared_informer.go:247] Caches are synced for HPA 
I0813 01:36:16.900593   57736 shared_informer.go:247] Caches are synced for daemon sets 
I0813 01:36:16.902323   57736 shared_informer.go:247] Caches are synced for taint 
I0813 01:36:16.902524   57736 node_lifecycle_controller.go:1429] Initializing eviction metric for zone: 
I0813 01:36:16.902589   57736 taint_manager.go:187] Starting NoExecuteTaintManager
I0813 01:36:16.902651   57736 node_lifecycle_controller.go:1195] Controller detected that all Nodes are not-Ready. Entering master disruption mode.
... skipping 132 lines ...
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_RESTMapper_evaluation_tests
+++ [0813 01:36:25] Creating namespace namespace-1597282585-6016
namespace/namespace-1597282585-6016 created
Context "test" modified.
+++ [0813 01:36:26] Testing RESTMapper
+++ [0813 01:36:26] "kubectl get unknownresourcetype" returns error as expected: error: the server doesn't have a resource type "unknownresourcetype"
+++ exit code: 0
NAME                              SHORTNAMES   APIGROUP                       NAMESPACED   KIND
bindings                                                                      true         Binding
componentstatuses                 cs                                          false        ComponentStatus
configmaps                        cm                                          true         ConfigMap
endpoints                         ep                                          true         Endpoints
... skipping 59 lines ...
namespace/namespace-1597282593-31067 created
Context "test" modified.
+++ [0813 01:36:33] Testing clusterroles
rbac.sh:29: Successful get clusterroles/cluster-admin {{.metadata.name}}: cluster-admin
(Brbac.sh:30: Successful get clusterrolebindings/cluster-admin {{.metadata.name}}: cluster-admin
(BSuccessful
message:Error from server (NotFound): clusterroles.rbac.authorization.k8s.io "pod-admin" not found
has:clusterroles.rbac.authorization.k8s.io "pod-admin" not found
clusterrole.rbac.authorization.k8s.io/pod-admin created (dry run)
clusterrole.rbac.authorization.k8s.io/pod-admin created (server dry run)
Successful
message:Error from server (NotFound): clusterroles.rbac.authorization.k8s.io "pod-admin" not found
has:clusterroles.rbac.authorization.k8s.io "pod-admin" not found
clusterrole.rbac.authorization.k8s.io/pod-admin created
rbac.sh:42: Successful get clusterrole/pod-admin {{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}: *:
(BSuccessful
message:warning: deleting cluster-scoped resources, not scoped to the provided namespace
clusterrole.rbac.authorization.k8s.io "pod-admin" deleted
... skipping 21 lines ...
(Bclusterrole.rbac.authorization.k8s.io/url-reader created
rbac.sh:61: Successful get clusterrole/url-reader {{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}: get:
(Brbac.sh:62: Successful get clusterrole/url-reader {{range.rules}}{{range.nonResourceURLs}}{{.}}:{{end}}{{end}}: /logs/*:/healthz/*:
(Bclusterrole.rbac.authorization.k8s.io/aggregation-reader created
rbac.sh:64: Successful get clusterrole/aggregation-reader {{.metadata.name}}: aggregation-reader
(BSuccessful
message:Error from server (NotFound): clusterrolebindings.rbac.authorization.k8s.io "super-admin" not found
has:clusterrolebindings.rbac.authorization.k8s.io "super-admin" not found
clusterrolebinding.rbac.authorization.k8s.io/super-admin created (dry run)
clusterrolebinding.rbac.authorization.k8s.io/super-admin created (server dry run)
Successful
message:Error from server (NotFound): clusterrolebindings.rbac.authorization.k8s.io "super-admin" not found
has:clusterrolebindings.rbac.authorization.k8s.io "super-admin" not found
clusterrolebinding.rbac.authorization.k8s.io/super-admin created
rbac.sh:77: Successful get clusterrolebinding/super-admin {{range.subjects}}{{.name}}:{{end}}: super-admin:
(Bclusterrolebinding.rbac.authorization.k8s.io/super-admin subjects updated (dry run)
clusterrolebinding.rbac.authorization.k8s.io/super-admin subjects updated (server dry run)
rbac.sh:80: Successful get clusterrolebinding/super-admin {{range.subjects}}{{.name}}:{{end}}: super-admin:
... skipping 59 lines ...
rbac.sh:102: Successful get clusterrolebinding/super-admin {{range.subjects}}{{.name}}:{{end}}: super-admin:foo:test-all-user:
(Brbac.sh:103: Successful get clusterrolebinding/super-group {{range.subjects}}{{.name}}:{{end}}: the-group:foo:test-all-user:
(Brbac.sh:104: Successful get clusterrolebinding/super-sa {{range.subjects}}{{.name}}:{{end}}: sa-name:foo:test-all-user:
(Brolebinding.rbac.authorization.k8s.io/admin created (dry run)
rolebinding.rbac.authorization.k8s.io/admin created (server dry run)
Successful
message:Error from server (NotFound): rolebindings.rbac.authorization.k8s.io "admin" not found
has: not found
rolebinding.rbac.authorization.k8s.io/admin created
rbac.sh:113: Successful get rolebinding/admin {{.roleRef.kind}}: ClusterRole
(Brbac.sh:114: Successful get rolebinding/admin {{range.subjects}}{{.name}}:{{end}}: default-admin:
(Brolebinding.rbac.authorization.k8s.io/admin subjects updated
rbac.sh:116: Successful get rolebinding/admin {{range.subjects}}{{.name}}:{{end}}: default-admin:foo:
... skipping 29 lines ...
message:Warning: rbac.authorization.k8s.io/v1beta1 Role is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 Role
No resources found in namespace-1597282608-7375 namespace.
has:Role is deprecated
Successful
message:Warning: rbac.authorization.k8s.io/v1beta1 Role is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 Role
No resources found in namespace-1597282608-7375 namespace.
Error: 1 warning received
has:Role is deprecated
Successful
message:Warning: rbac.authorization.k8s.io/v1beta1 Role is deprecated in v1.17+, unavailable in v1.22+; use rbac.authorization.k8s.io/v1 Role
No resources found in namespace-1597282608-7375 namespace.
Error: 1 warning received
has:Error: 1 warning received
role.rbac.authorization.k8s.io/pod-admin created (dry run)
role.rbac.authorization.k8s.io/pod-admin created (server dry run)
Successful
message:Error from server (NotFound): roles.rbac.authorization.k8s.io "pod-admin" not found
has: not found
role.rbac.authorization.k8s.io/pod-admin created
rbac.sh:163: Successful get role/pod-admin {{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}: *:
(Brbac.sh:164: Successful get role/pod-admin {{range.rules}}{{range.resources}}{{.}}:{{end}}{{end}}: pods:
(Brbac.sh:165: Successful get role/pod-admin {{range.rules}}{{range.apiGroups}}{{.}}:{{end}}{{end}}: :
(BSuccessful
... skipping 461 lines ...
has:valid-pod
Successful
message:NAME        READY   STATUS    RESTARTS   AGE
valid-pod   0/1     Pending   0          1s
has:valid-pod
core.sh:190: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
(Berror: resource(s) were provided, but no name, label selector, or --all flag specified
core.sh:194: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
(Bcore.sh:198: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
(Berror: setting 'all' parameter but found a non empty selector. 
core.sh:202: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
(Bcore.sh:206: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
(Bwarning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
pod "valid-pod" force deleted
core.sh:210: Successful get pods -l'name in (valid-pod)' {{range.items}}{{.metadata.name}}:{{end}}: 
(Bcore.sh:215: Successful get namespaces {{range.items}}{{ if eq .metadata.name \"test-kubectl-describe-pod\" }}found{{end}}{{end}}:: :
... skipping 19 lines ...
(Bpoddisruptionbudget.policy/test-pdb-2 created
core.sh:259: Successful get pdb/test-pdb-2 --namespace=test-kubectl-describe-pod {{.spec.minAvailable}}: 50%
(Bpoddisruptionbudget.policy/test-pdb-3 created
core.sh:265: Successful get pdb/test-pdb-3 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 2
(Bpoddisruptionbudget.policy/test-pdb-4 created
core.sh:269: Successful get pdb/test-pdb-4 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 50%
(Berror: min-available and max-unavailable cannot be both specified
I0813 01:37:12.375426   54230 client.go:360] parsed scheme: "passthrough"
I0813 01:37:12.375484   54230 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{http://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
I0813 01:37:12.375493   54230 clientconn.go:948] ClientConn switching balancer to "pick_first"
core.sh:275: Successful get pods --namespace=test-kubectl-describe-pod {{range.items}}{{.metadata.name}}:{{end}}: 
(Bpod/env-test-pod created
matched TEST_CMD_1
... skipping 224 lines ...
core.sh:534: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:3.2:
(BSuccessful
message:kubectl-create kubectl-patch
has:kubectl-patch
pod/valid-pod patched
core.sh:554: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: nginx:
(B+++ [0813 01:37:39] "kubectl patch with resourceVersion 596" returns error as expected: Error from server (Conflict): Operation cannot be fulfilled on pods "valid-pod": the object has been modified; please apply your changes to the latest version and try again
pod "valid-pod" deleted
pod/valid-pod replaced
core.sh:578: Successful get pod valid-pod {{(index .spec.containers 0).name}}: replaced-k8s-serve-hostname
(BSuccessful
message:kubectl-create kubectl-patch kubectl-replace
has:kubectl-replace
Successful
message:error: --grace-period must have --force specified
has:\-\-grace-period must have \-\-force specified
Successful
message:error: --timeout must have --force specified
has:\-\-timeout must have \-\-force specified
node/node-v1-test created
W0813 01:37:42.078874   57736 actual_state_of_world.go:506] Failed to update statusUpdateNeeded field in actual state of world: Failed to set statusUpdateNeeded to needed true, because nodeName="node-v1-test" does not exist
core.sh:606: Successful get node node-v1-test {{range.items}}{{if .metadata.annotations.a}}found{{end}}{{end}}:: :
(Bnode/node-v1-test replaced (server dry run)
node/node-v1-test replaced (dry run)
core.sh:631: Successful get node node-v1-test {{range.items}}{{if .metadata.annotations.a}}found{{end}}{{end}}:: :
(Bnode/node-v1-test replaced
core.sh:647: Successful get node node-v1-test {{.metadata.annotations.a}}: b
... skipping 29 lines ...
spec:
  containers:
  - image: k8s.gcr.io/pause:2.0
    name: kubernetes-pause
has:localonlyvalue
core.sh:683: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
(Berror: 'name' already has a value (valid-pod), and --overwrite is false
core.sh:687: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
(Bcore.sh:691: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
(Bpod/valid-pod labeled
core.sh:695: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod-super-sayan
(Bcore.sh:699: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
(Bwarning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
... skipping 86 lines ...
+++ Running case: test-cmd.run_kubectl_create_error_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_kubectl_create_error_tests
+++ [0813 01:38:02] Creating namespace namespace-1597282682-21046
namespace/namespace-1597282682-21046 created
Context "test" modified.
+++ [0813 01:38:03] Testing kubectl create with error
Error: must specify one of -f and -k

Create a resource from a file or from stdin.

 JSON and YAML formats are accepted.

Examples:
... skipping 42 lines ...

Usage:
  kubectl create -f FILENAME [options]

Use "kubectl <command> --help" for more information about a given command.
Use "kubectl options" for a list of global command-line options (applies to all commands).
+++ [0813 01:38:03] "kubectl create with empty string list returns error as expected: error: error validating "hack/testdata/invalid-rc-with-empty-args.yaml": error validating data: ValidationError(ReplicationController.spec.template.spec.containers[0].args): unknown object type "nil" in ReplicationController.spec.template.spec.containers[0].args[0]; if you choose to ignore these errors, turn validation off with --validate=false
kubectl convert is DEPRECATED and will be removed in a future version.
In order to convert, kubectl apply the object to the cluster, then kubectl get at the desired version.
+++ exit code: 0
Recording: run_kubectl_apply_tests
Running command: run_kubectl_apply_tests

... skipping 31 lines ...
I0813 01:38:09.403573   57736 event.go:291] "Event occurred" object="namespace-1597282684-31784/test-deployment-retainkeys-8695b756f8" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: test-deployment-retainkeys-8695b756f8-268hj"
deployment.apps "test-deployment-retainkeys" deleted
apply.sh:88: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(Bpod/selector-test-pod created
apply.sh:92: Successful get pods selector-test-pod {{.metadata.labels.name}}: selector-test-pod
(BSuccessful
message:Error from server (NotFound): pods "selector-test-pod-dont-apply" not found
has:pods "selector-test-pod-dont-apply" not found
pod "selector-test-pod" deleted
apply.sh:101: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(BW0813 01:38:11.641309   66136 helpers.go:567] --dry-run=true is deprecated (boolean value) and can be replaced with --dry-run=client.
pod/test-pod created (dry run)
pod/test-pod created (dry run)
... skipping 11 lines ...
customresourcedefinition.apiextensions.k8s.io/resources.mygroup.example.com created
Warning: apiextensions.k8s.io/v1beta1 CustomResourceDefinition is deprecated in v1.16+, unavailable in v1.22+; use apiextensions.k8s.io/v1 CustomResourceDefinition
I0813 01:38:16.524561   54230 client.go:360] parsed scheme: "endpoint"
I0813 01:38:16.524618   54230 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
I0813 01:38:16.533681   54230 controller.go:606] quota admission added evaluator for: resources.mygroup.example.com
kind.mygroup.example.com/myobj created (server dry run)
Error from server (NotFound): resources.mygroup.example.com "myobj" not found
I0813 01:38:16.714462   57736 horizontal.go:354] Horizontal Pod Autoscaler frontend has been deleted in namespace-1597282677-22052
customresourcedefinition.apiextensions.k8s.io "resources.mygroup.example.com" deleted
namespace/nsb created
apply.sh:158: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(Bpod/a created
apply.sh:161: Successful get pods a -n nsb {{.metadata.name}}: a
(BI0813 01:38:17.869920   54230 ???:1] sending watch cancel request for closed watcher{watch-id 11 0  <nil>}
W0813 01:38:17.870007   54230 ???:1] failed to send watch cancel request{watch-id 11 0  <nil>} {error 25 0  EOF}
pod/b created
pod/a pruned
Warning: extensions/v1beta1 Ingress is deprecated in v1.14+, unavailable in v1.22+; use networking.k8s.io/v1 Ingress
apply.sh:165: Successful get pods b -n nsb {{.metadata.name}}: b
(BSuccessful
message:Error from server (NotFound): pods "a" not found
has:pods "a" not found
pod "b" deleted
apply.sh:175: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(Bpod/a created
apply.sh:180: Successful get pods a {{.metadata.name}}: a
(BSuccessful
message:Error from server (NotFound): pods "b" not found
has:pods "b" not found
pod/b created
apply.sh:188: Successful get pods a {{.metadata.name}}: a
(Bapply.sh:189: Successful get pods b -n nsb {{.metadata.name}}: b
(Bpod "a" deleted
pod "b" deleted
Successful
message:error: all resources selected for prune without explicitly passing --all. To prune all resources, pass the --all flag. If you did not mean to prune all resources, specify a label selector
has:all resources selected for prune without explicitly passing --all
pod/a created
pod/b created
service/prune-svc created
Warning: extensions/v1beta1 Ingress is deprecated in v1.14+, unavailable in v1.22+; use networking.k8s.io/v1 Ingress
apply.sh:201: Successful get pods a {{.metadata.name}}: a
... skipping 42 lines ...
(Bpod/b created
apply.sh:242: Successful get pods b -n nsb {{.metadata.name}}: b
(Bpod/b unchanged
pod/a pruned
Warning: extensions/v1beta1 Ingress is deprecated in v1.14+, unavailable in v1.22+; use networking.k8s.io/v1 Ingress
Successful
message:Error from server (NotFound): pods "a" not found
has:pods "a" not found
apply.sh:249: Successful get pods b -n nsb {{.metadata.name}}: b
(Bnamespace "nsb" deleted
Successful
message:error: the namespace from the provided object "nsb" does not match the namespace "foo". You must pass '--namespace=nsb' to perform this operation.
has:the namespace from the provided object "nsb" does not match the namespace "foo".
apply.sh:260: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: 
(Bservice/a created
apply.sh:264: Successful get services a {{.metadata.name}}: a
(BSuccessful
message:The Service "a" is invalid: spec.clusterIP: Invalid value: "10.0.0.12": field is immutable
... skipping 26 lines ...
(Bapply.sh:286: Successful get deployment test-the-deployment {{.metadata.name}}: test-the-deployment
(Bapply.sh:287: Successful get service test-the-service {{.metadata.name}}: test-the-service
(Bconfigmap "test-the-map" deleted
service "test-the-service" deleted
deployment.apps "test-the-deployment" deleted
Successful
message:Error from server (NotFound): namespaces "multi-resource-ns" not found
has:namespaces "multi-resource-ns" not found
apply.sh:295: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(BSuccessful
message:namespace/multi-resource-ns created
Error from server (NotFound): error when creating "hack/testdata/multi-resource-1.yaml": namespaces "multi-resource-ns" not found
has:namespaces "multi-resource-ns" not found
Successful
message:Error from server (NotFound): pods "test-pod" not found
has:pods "test-pod" not found
pod/test-pod created
namespace/multi-resource-ns unchanged
apply.sh:303: Successful get pods test-pod -n multi-resource-ns {{.metadata.name}}: test-pod
(Bpod "test-pod" deleted
namespace "multi-resource-ns" deleted
apply.sh:309: Successful get configmaps {{range.items}}{{.metadata.name}}:{{end}}: 
(BSuccessful
message:configmap/foo created
error: unable to recognize "hack/testdata/multi-resource-2.yaml": no matches for kind "Bogus" in version "example.com/v1"
has:no matches for kind "Bogus" in version "example.com/v1"
apply.sh:315: Successful get configmaps foo {{.metadata.name}}: foo
(Bconfigmap "foo" deleted
apply.sh:321: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(BSuccessful
message:pod/pod-a created
... skipping 6 lines ...
pod "pod-c" deleted
apply.sh:329: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(Bapply.sh:333: Successful get crds {{range.items}}{{.metadata.name}}:{{end}}: 
(BSuccessful
message:Warning: apiextensions.k8s.io/v1beta1 CustomResourceDefinition is deprecated in v1.16+, unavailable in v1.22+; use apiextensions.k8s.io/v1 CustomResourceDefinition
customresourcedefinition.apiextensions.k8s.io/widgets.example.com created
error: unable to recognize "hack/testdata/multi-resource-4.yaml": no matches for kind "Widget" in version "example.com/v1"
has:no matches for kind "Widget" in version "example.com/v1"
I0813 01:39:07.010739   54230 client.go:360] parsed scheme: "endpoint"
I0813 01:39:07.010794   54230 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
Successful
message:Error from server (NotFound): widgets.example.com "foo" not found
has:widgets.example.com "foo" not found
apply.sh:339: Successful get crds widgets.example.com {{.metadata.name}}: widgets.example.com
(BI0813 01:39:07.572086   54230 controller.go:606] quota admission added evaluator for: widgets.example.com
widget.example.com/foo created
Warning: apiextensions.k8s.io/v1beta1 CustomResourceDefinition is deprecated in v1.16+, unavailable in v1.22+; use apiextensions.k8s.io/v1 CustomResourceDefinition
customresourcedefinition.apiextensions.k8s.io/widgets.example.com unchanged
... skipping 13 lines ...
Context "test" modified.
+++ [0813 01:39:08] Testing kubectl apply --server-side
I0813 01:39:08.448036   57736 namespace_controller.go:185] Namespace has been deleted multi-resource-ns
apply.sh:359: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(Bpod/test-pod serverside-applied
I0813 01:39:08.952498   54230 ???:1] sending watch cancel request for closed watcher{watch-id 11 0  <nil>}
W0813 01:39:08.952567   54230 ???:1] failed to send watch cancel request{watch-id 11 0  <nil>} {error 25 0  EOF}
apply.sh:363: Successful get pods test-pod {{.metadata.labels.name}}: test-pod-label
(BSuccessful
message:kubectl
has:kubectl
pod/test-pod serverside-applied
Successful
... skipping 10 lines ...
message:931
has:931
pod "test-pod" deleted
apply.sh:398: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(B+++ [0813 01:39:12] Testing upgrade kubectl client-side apply to server-side apply
pod/test-pod created
error: Apply failed with 1 conflict: conflict with "kubectl-client-side-apply" using v1: .metadata.labels.name
Please review the fields above--they currently have other managers. Here
are the ways you can resolve this warning:
* If you intend to manage all of these fields, please re-run the apply
  command with the `--force-conflicts` flag.
* If you do not intend to manage all of the fields, please edit your
  manifest to remove references to the fields that should keep their
... skipping 58 lines ...
message:resources.mygroup.example.com
has:resources.mygroup.example.com
Warning: apiextensions.k8s.io/v1beta1 CustomResourceDefinition is deprecated in v1.16+, unavailable in v1.22+; use apiextensions.k8s.io/v1 CustomResourceDefinition
I0813 01:39:16.688449   54230 client.go:360] parsed scheme: "endpoint"
I0813 01:39:16.688497   54230 endpoint.go:68] ccResolverWrapper: sending new addresses to cc: [{http://127.0.0.1:2379  <nil> 0 <nil>}]
kind.mygroup.example.com/myobj serverside-applied (server dry run)
Error from server (NotFound): resources.mygroup.example.com "myobj" not found
customresourcedefinition.apiextensions.k8s.io "resources.mygroup.example.com" deleted
+++ exit code: 0
Recording: run_kubectl_run_tests
Running command: run_kubectl_run_tests

+++ Running case: test-cmd.run_kubectl_run_tests 
... skipping 12 lines ...
(Bpod "nginx-extensions" deleted
Successful
message:pod/test1 created
has:pod/test1 created
pod "test1" deleted
Successful
message:error: Invalid image name "InvalidImageName": invalid reference format
has:error: Invalid image name "InvalidImageName": invalid reference format
+++ exit code: 0
Recording: run_kubectl_create_filter_tests
Running command: run_kubectl_create_filter_tests

+++ Running case: test-cmd.run_kubectl_create_filter_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
... skipping 3 lines ...
Context "test" modified.
+++ [0813 01:39:23] Testing kubectl create filter
create.sh:50: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(Bpod/selector-test-pod created
create.sh:54: Successful get pods selector-test-pod {{.metadata.labels.name}}: selector-test-pod
(BSuccessful
message:Error from server (NotFound): pods "selector-test-pod-dont-apply" not found
has:pods "selector-test-pod-dont-apply" not found
pod "selector-test-pod" deleted
+++ exit code: 0
Recording: run_kubectl_apply_deployments_tests
Running command: run_kubectl_apply_deployments_tests

... skipping 29 lines ...
I0813 01:39:34.850675   57736 event.go:291] "Event occurred" object="namespace-1597282766-15906/nginx" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-9bb9c4878 to 3"
I0813 01:39:34.854700   57736 event.go:291] "Event occurred" object="namespace-1597282766-15906/nginx-9bb9c4878" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-9bb9c4878-mmc2d"
I0813 01:39:34.862184   57736 event.go:291] "Event occurred" object="namespace-1597282766-15906/nginx-9bb9c4878" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-9bb9c4878-znvtl"
I0813 01:39:34.864086   57736 event.go:291] "Event occurred" object="namespace-1597282766-15906/nginx-9bb9c4878" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-9bb9c4878-7fqn9"
apps.sh:152: Successful get deployment nginx {{.metadata.name}}: nginx
(BSuccessful
message:Error from server (Conflict): error when applying patch:
{"metadata":{"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"apps/v1\",\"kind\":\"Deployment\",\"metadata\":{\"annotations\":{},\"labels\":{\"name\":\"nginx\"},\"name\":\"nginx\",\"namespace\":\"namespace-1597282766-15906\",\"resourceVersion\":\"99\"},\"spec\":{\"replicas\":3,\"selector\":{\"matchLabels\":{\"name\":\"nginx2\"}},\"template\":{\"metadata\":{\"labels\":{\"name\":\"nginx2\"}},\"spec\":{\"containers\":[{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"nginx\",\"ports\":[{\"containerPort\":80}]}]}}}}\n"},"resourceVersion":"99"},"spec":{"selector":{"matchLabels":{"name":"nginx2"}},"template":{"metadata":{"labels":{"name":"nginx2"}}}}}
to:
Resource: "apps/v1, Resource=deployments", GroupVersionKind: "apps/v1, Kind=Deployment"
Name: "nginx", Namespace: "namespace-1597282766-15906"
for: "hack/testdata/deployment-label-change2.yaml": Operation cannot be fulfilled on deployments.apps "nginx": the object has been modified; please apply your changes to the latest version and try again
has:Error from server (Conflict)
deployment.apps/nginx configured
I0813 01:39:45.490896   57736 event.go:291] "Event occurred" object="namespace-1597282766-15906/nginx" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-6dd6cfdb57 to 3"
I0813 01:39:45.500333   57736 event.go:291] "Event occurred" object="namespace-1597282766-15906/nginx-6dd6cfdb57" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-6dd6cfdb57-fw5cg"
I0813 01:39:45.507459   57736 event.go:291] "Event occurred" object="namespace-1597282766-15906/nginx-6dd6cfdb57" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-6dd6cfdb57-p9fg8"
I0813 01:39:45.509849   57736 event.go:291] "Event occurred" object="namespace-1597282766-15906/nginx-6dd6cfdb57" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-6dd6cfdb57-9zf72"
Successful
... skipping 323 lines ...
+++ [0813 01:40:06] Creating namespace namespace-1597282806-18538
namespace/namespace-1597282806-18538 created
Context "test" modified.
+++ [0813 01:40:06] Testing kubectl get
get.sh:29: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(BSuccessful
message:Error from server (NotFound): pods "abc" not found
has:pods "abc" not found
get.sh:37: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(BSuccessful
message:Error from server (NotFound): pods "abc" not found
has:pods "abc" not found
get.sh:45: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(BSuccessful
message:{
    "apiVersion": "v1",
    "items": [],
... skipping 23 lines ...
has not:No resources found
Successful
message:NAME
has not:No resources found
get.sh:73: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(BSuccessful
message:error: the server doesn't have a resource type "foobar"
has not:No resources found
Successful
message:No resources found in namespace-1597282806-18538 namespace.
has:No resources found
Successful
message:
has not:No resources found
Successful
message:No resources found in namespace-1597282806-18538 namespace.
has:No resources found
get.sh:93: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(BSuccessful
message:Error from server (NotFound): pods "abc" not found
has:pods "abc" not found
Successful
message:Error from server (NotFound): pods "abc" not found
has not:List
Successful
message:I0813 01:40:09.784730   69628 loader.go:375] Config loaded from file:  /tmp/tmp.53gPYlvcn6/.kube/config
I0813 01:40:09.786344   69628 round_trippers.go:443] GET http://127.0.0.1:8080/version?timeout=32s 200 OK in 1 milliseconds
I0813 01:40:09.831363   69628 round_trippers.go:443] GET http://127.0.0.1:8080/api/v1/namespaces/default/pods 200 OK in 1 milliseconds
I0813 01:40:09.833067   69628 round_trippers.go:443] GET http://127.0.0.1:8080/api/v1/namespaces/default/replicationcontrollers 200 OK in 1 milliseconds
... skipping 627 lines ...
}
get.sh:158: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
(B<no value>Successful
message:valid-pod:
has:valid-pod:
Successful
message:error: error executing jsonpath "{.missing}": Error executing template: missing is not found. Printing more information for debugging the template:
	template was:
		{.missing}
	object given to jsonpath engine was:
		map[string]interface {}{"apiVersion":"v1", "kind":"Pod", "metadata":map[string]interface {}{"creationTimestamp":"2020-08-13T01:40:18Z", "labels":map[string]interface {}{"name":"valid-pod"}, "managedFields":[]interface {}{map[string]interface {}{"apiVersion":"v1", "fieldsType":"FieldsV1", "fieldsV1":map[string]interface {}{"f:metadata":map[string]interface {}{"f:labels":map[string]interface {}{".":map[string]interface {}{}, "f:name":map[string]interface {}{}}}, "f:spec":map[string]interface {}{"f:containers":map[string]interface {}{"k:{\"name\":\"kubernetes-serve-hostname\"}":map[string]interface {}{".":map[string]interface {}{}, "f:image":map[string]interface {}{}, "f:imagePullPolicy":map[string]interface {}{}, "f:name":map[string]interface {}{}, "f:resources":map[string]interface {}{".":map[string]interface {}{}, "f:limits":map[string]interface {}{".":map[string]interface {}{}, "f:cpu":map[string]interface {}{}, "f:memory":map[string]interface {}{}}, "f:requests":map[string]interface {}{".":map[string]interface {}{}, "f:cpu":map[string]interface {}{}, "f:memory":map[string]interface {}{}}}, "f:terminationMessagePath":map[string]interface {}{}, "f:terminationMessagePolicy":map[string]interface {}{}}}, "f:dnsPolicy":map[string]interface {}{}, "f:enableServiceLinks":map[string]interface {}{}, "f:restartPolicy":map[string]interface {}{}, "f:schedulerName":map[string]interface {}{}, "f:securityContext":map[string]interface {}{}, "f:terminationGracePeriodSeconds":map[string]interface {}{}}}, "manager":"kubectl-create", "operation":"Update", "time":"2020-08-13T01:40:18Z"}}, "name":"valid-pod", "namespace":"namespace-1597282817-16158", "resourceVersion":"1162", "selfLink":"/api/v1/namespaces/namespace-1597282817-16158/pods/valid-pod", "uid":"670db3c3-725b-4bad-ab8d-d3b2ba520add"}, "spec":map[string]interface {}{"containers":[]interface {}{map[string]interface {}{"image":"k8s.gcr.io/serve_hostname", "imagePullPolicy":"Always", "name":"kubernetes-serve-hostname", 
"resources":map[string]interface {}{"limits":map[string]interface {}{"cpu":"1", "memory":"512Mi"}, "requests":map[string]interface {}{"cpu":"1", "memory":"512Mi"}}, "terminationMessagePath":"/dev/termination-log", "terminationMessagePolicy":"File"}}, "dnsPolicy":"ClusterFirst", "enableServiceLinks":true, "preemptionPolicy":"PreemptLowerPriority", "priority":0, "restartPolicy":"Always", "schedulerName":"default-scheduler", "securityContext":map[string]interface {}{}, "terminationGracePeriodSeconds":30}, "status":map[string]interface {}{"phase":"Pending", "qosClass":"Guaranteed"}}
has:missing is not found
error: error executing template "{{.missing}}": template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing"
Successful
message:Error executing template: template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing". Printing more information for debugging the template:
	template was:
		{{.missing}}
	raw data was:
		{"apiVersion":"v1","kind":"Pod","metadata":{"creationTimestamp":"2020-08-13T01:40:18Z","labels":{"name":"valid-pod"},"managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:name":{}}},"f:spec":{"f:containers":{"k:{\"name\":\"kubernetes-serve-hostname\"}":{".":{},"f:image":{},"f:imagePullPolicy":{},"f:name":{},"f:resources":{".":{},"f:limits":{".":{},"f:cpu":{},"f:memory":{}},"f:requests":{".":{},"f:cpu":{},"f:memory":{}}},"f:terminationMessagePath":{},"f:terminationMessagePolicy":{}}},"f:dnsPolicy":{},"f:enableServiceLinks":{},"f:restartPolicy":{},"f:schedulerName":{},"f:securityContext":{},"f:terminationGracePeriodSeconds":{}}},"manager":"kubectl-create","operation":"Update","time":"2020-08-13T01:40:18Z"}],"name":"valid-pod","namespace":"namespace-1597282817-16158","resourceVersion":"1162","selfLink":"/api/v1/namespaces/namespace-1597282817-16158/pods/valid-pod","uid":"670db3c3-725b-4bad-ab8d-d3b2ba520add"},"spec":{"containers":[{"image":"k8s.gcr.io/serve_hostname","imagePullPolicy":"Always","name":"kubernetes-serve-hostname","resources":{"limits":{"cpu":"1","memory":"512Mi"},"requests":{"cpu":"1","memory":"512Mi"}},"terminationMessagePath":"/dev/termination-log","terminationMessagePolicy":"File"}],"dnsPolicy":"ClusterFirst","enableServiceLinks":true,"preemptionPolicy":"PreemptLowerPriority","priority":0,"restartPolicy":"Always","schedulerName":"default-scheduler","securityContext":{},"terminationGracePeriodSeconds":30},"status":{"phase":"Pending","qosClass":"Guaranteed"}}
	object given to template engine was:
		map[apiVersion:v1 kind:Pod metadata:map[creationTimestamp:2020-08-13T01:40:18Z labels:map[name:valid-pod] managedFields:[map[apiVersion:v1 fieldsType:FieldsV1 fieldsV1:map[f:metadata:map[f:labels:map[.:map[] f:name:map[]]] f:spec:map[f:containers:map[k:{"name":"kubernetes-serve-hostname"}:map[.:map[] f:image:map[] f:imagePullPolicy:map[] f:name:map[] f:resources:map[.:map[] f:limits:map[.:map[] f:cpu:map[] f:memory:map[]] f:requests:map[.:map[] f:cpu:map[] f:memory:map[]]] f:terminationMessagePath:map[] f:terminationMessagePolicy:map[]]] f:dnsPolicy:map[] f:enableServiceLinks:map[] f:restartPolicy:map[] f:schedulerName:map[] f:securityContext:map[] f:terminationGracePeriodSeconds:map[]]] manager:kubectl-create operation:Update time:2020-08-13T01:40:18Z]] name:valid-pod namespace:namespace-1597282817-16158 resourceVersion:1162 selfLink:/api/v1/namespaces/namespace-1597282817-16158/pods/valid-pod uid:670db3c3-725b-4bad-ab8d-d3b2ba520add] spec:map[containers:[map[image:k8s.gcr.io/serve_hostname imagePullPolicy:Always name:kubernetes-serve-hostname resources:map[limits:map[cpu:1 memory:512Mi] requests:map[cpu:1 memory:512Mi]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:File]] dnsPolicy:ClusterFirst enableServiceLinks:true preemptionPolicy:PreemptLowerPriority priority:0 restartPolicy:Always schedulerName:default-scheduler securityContext:map[] terminationGracePeriodSeconds:30] status:map[phase:Pending qosClass:Guaranteed]]
... skipping 158 lines ...
  terminationGracePeriodSeconds: 30
status:
  phase: Pending
  qosClass: Guaranteed
has:name: valid-pod
Successful
message:Error from server (NotFound): pods "invalid-pod" not found
has:"invalid-pod" not found
pod "valid-pod" deleted
get.sh:196: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(Bpod/redis-master created
pod/valid-pod created
Successful
... skipping 36 lines ...
+++ [0813 01:40:26] Creating namespace namespace-1597282826-32253
namespace/namespace-1597282826-32253 created
Context "test" modified.
+++ [0813 01:40:26] Testing kubectl exec POD COMMAND
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (NotFound): pods "abc" not found
has:pods "abc" not found
pod/test-pod created
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod test-pod does not have a host assigned
has not:pods "test-pod" not found
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod test-pod does not have a host assigned
has not:pod or type/name must be specified
pod "test-pod" deleted
+++ exit code: 0
Recording: run_kubectl_exec_resource_name_tests
Running command: run_kubectl_exec_resource_name_tests

... skipping 3 lines ...
+++ [0813 01:40:27] Creating namespace namespace-1597282827-1657
namespace/namespace-1597282827-1657 created
Context "test" modified.
+++ [0813 01:40:28] Testing kubectl exec TYPE/NAME COMMAND
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
error: the server doesn't have a resource type "foo"
has:error:
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (NotFound): deployments.apps "bar" not found
has:"bar" not found
pod/test-pod created
replicaset.apps/frontend created
I0813 01:40:29.670755   57736 event.go:291] "Event occurred" object="namespace-1597282827-1657/frontend" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-kcjdk"
I0813 01:40:29.675424   57736 event.go:291] "Event occurred" object="namespace-1597282827-1657/frontend" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-szgzj"
I0813 01:40:29.675471   57736 event.go:291] "Event occurred" object="namespace-1597282827-1657/frontend" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-rwtpv"
configmap/test-set-env-config created
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
error: cannot attach to *v1.ConfigMap: selector for *v1.ConfigMap not implemented
has:not implemented
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod test-pod does not have a host assigned
has not:not found
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod test-pod does not have a host assigned
has not:pod, type/name or --filename must be specified
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod frontend-kcjdk does not have a host assigned
has not:not found
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod frontend-kcjdk does not have a host assigned
has not:pod, type/name or --filename must be specified
pod "test-pod" deleted
I0813 01:40:30.952587   54230 client.go:360] parsed scheme: "passthrough"
I0813 01:40:30.952668   54230 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{http://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
I0813 01:40:30.952682   54230 clientconn.go:948] ClientConn switching balancer to "pick_first"
replicaset.apps "frontend" deleted
... skipping 3 lines ...
Running command: run_create_secret_tests

+++ Running case: test-cmd.run_create_secret_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_create_secret_tests
Successful
message:Error from server (NotFound): secrets "mysecret" not found
has:secrets "mysecret" not found
Successful
message:user-specified
has:user-specified
Successful
message:Error from server (NotFound): secrets "mysecret" not found
has:secrets "mysecret" not found
Successful
{"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"tester-update-cm","namespace":"default","selfLink":"/api/v1/namespaces/default/configmaps/tester-update-cm","uid":"183f562b-d6f0-4760-9c54-66e7b3d6e125","resourceVersion":"1250","creationTimestamp":"2020-08-13T01:40:32Z"}}
Successful
message:{"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"tester-update-cm","namespace":"default","selfLink":"/api/v1/namespaces/default/configmaps/tester-update-cm","uid":"183f562b-d6f0-4760-9c54-66e7b3d6e125","resourceVersion":"1251","creationTimestamp":"2020-08-13T01:40:32Z"},"data":{"key1":"config1"}}
has:uid
Successful
message:{"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"tester-update-cm","namespace":"default","selfLink":"/api/v1/namespaces/default/configmaps/tester-update-cm","uid":"183f562b-d6f0-4760-9c54-66e7b3d6e125","resourceVersion":"1251","creationTimestamp":"2020-08-13T01:40:32Z"},"data":{"key1":"config1"}}
has:config1
{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Success","details":{"name":"tester-update-cm","kind":"configmaps","uid":"183f562b-d6f0-4760-9c54-66e7b3d6e125"}}
Successful
message:Error from server (NotFound): configmaps "tester-update-cm" not found
has:configmaps "tester-update-cm" not found
+++ exit code: 0
Recording: run_kubectl_create_kustomization_directory_tests
Running command: run_kubectl_create_kustomization_directory_tests

+++ Running case: test-cmd.run_kubectl_create_kustomization_directory_tests 
... skipping 173 lines ...
has:Timeout
Successful
message:NAME        READY   STATUS    RESTARTS   AGE
valid-pod   0/1     Pending   0          2s
has:valid-pod
Successful
message:error: Invalid timeout value. Timeout must be a single integer in seconds, or an integer followed by a corresponding time unit (e.g. 1s | 2m | 3h)
has:Invalid timeout value
pod "valid-pod" deleted
+++ exit code: 0
Recording: run_crd_tests
Running command: run_crd_tests

... skipping 258 lines ...
I0813 01:40:51.834968   57736 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for validfoos.company.com
I0813 01:40:51.835024   57736 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for bars.company.com
I0813 01:40:51.835061   57736 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for resources.mygroup.example.com
I0813 01:40:51.835137   57736 shared_informer.go:240] Waiting for caches to sync for resource quota
I0813 01:40:51.835171   57736 shared_informer.go:247] Caches are synced for resource quota 
crd.sh:240: Successful get foos/test {{.patched}}: <no value>
(B+++ [0813 01:40:52] "kubectl patch --local" returns error as expected for CustomResource: error: cannot apply strategic merge patch for company.com/v1, Kind=Foo locally, try --type merge
{
    "apiVersion": "company.com/v1",
    "kind": "Foo",
    "metadata": {
        "annotations": {
            "kubernetes.io/change-cause": "kubectl patch foos/test --server=http://127.0.0.1:8080 --match-server-version=true --patch={\"patched\":null} --type=merge --record=true"
... skipping 358 lines ...
(Bcrd.sh:450: Successful get bars {{range.items}}{{.metadata.name}}:{{end}}: 
(Bnamespace/non-native-resources created
bar.company.com/test created
crd.sh:455: Successful get bars {{len .items}}: 1
(Bnamespace "non-native-resources" deleted
crd.sh:458: Successful get bars {{len .items}}: 0
(BError from server (NotFound): namespaces "non-native-resources" not found
customresourcedefinition.apiextensions.k8s.io "foos.company.com" deleted
customresourcedefinition.apiextensions.k8s.io "bars.company.com" deleted
customresourcedefinition.apiextensions.k8s.io "resources.mygroup.example.com" deleted
customresourcedefinition.apiextensions.k8s.io "validfoos.company.com" deleted
+++ exit code: 0
+++ [0813 01:41:15] Testing recursive resources
+++ [0813 01:41:15] Creating namespace namespace-1597282875-18433
namespace/namespace-1597282875-18433 created
W0813 01:41:16.003602   54230 cacher.go:148] Terminating all watchers from cacher *unstructured.Unstructured
I0813 01:41:16.003611   54230 ???:1] sending watch cancel request for closed watcher{watch-id 11 0  <nil>}
Context "test" modified.
E0813 01:41:16.005385   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
W0813 01:41:16.182546   54230 cacher.go:148] Terminating all watchers from cacher *unstructured.Unstructured
I0813 01:41:16.183043   54230 ???:1] sending watch cancel request for closed watcher{watch-id 11 0  <nil>}
E0813 01:41:16.184971   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:202: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(BW0813 01:41:16.352369   54230 cacher.go:148] Terminating all watchers from cacher *unstructured.Unstructured
I0813 01:41:16.352376   54230 ???:1] sending watch cancel request for closed watcher{watch-id 11 0  <nil>}
E0813 01:41:16.354539   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
W0813 01:41:16.558809   54230 cacher.go:148] Terminating all watchers from cacher *unstructured.Unstructured
I0813 01:41:16.558854   54230 ???:1] sending watch cancel request for closed watcher{watch-id 11 0  <nil>}
E0813 01:41:16.560730   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:206: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(BSuccessful
message:pod/busybox0 created
pod/busybox1 created
error: error validating "hack/testdata/recursive/pod/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
has:error validating data: kind not set
generic-resources.sh:211: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(BE0813 01:41:17.294969   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:220: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: busybox:busybox:
(BSuccessful
message:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:227: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(BE0813 01:41:17.575805   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0813 01:41:17.688688   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0813 01:41:17.808188   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:231: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
(BSuccessful
message:pod/busybox0 replaced
pod/busybox1 replaced
error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
has:error validating data: kind not set
generic-resources.sh:236: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(BSuccessful
message:Name:         busybox0
Namespace:    namespace-1597282875-18433
Priority:     0
Node:         <none>
... skipping 159 lines ...
has:Object 'Kind' is missing
generic-resources.sh:246: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(Bgeneric-resources.sh:250: Successful get pods {{range.items}}{{.metadata.annotations.annotatekey}}:{{end}}: annotatevalue:annotatevalue:
(BSuccessful
message:pod/busybox0 annotated
pod/busybox1 annotated
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:255: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(BI0813 01:41:19.516400   57736 namespace_controller.go:185] Namespace has been deleted non-native-resources
E0813 01:41:19.718082   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:259: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
(BSuccessful
message:Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
pod/busybox0 configured
Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
pod/busybox1 configured
error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
has:error validating data: kind not set
E0813 01:41:19.856067   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:265: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
(BE0813 01:41:19.986380   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
deployment.apps/nginx created
I0813 01:41:20.309219   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/nginx" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-54785cbcb8 to 3"
I0813 01:41:20.313776   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/nginx-54785cbcb8" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-54785cbcb8-zwsq2"
I0813 01:41:20.319578   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/nginx-54785cbcb8" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-54785cbcb8-9w26z"
I0813 01:41:20.320040   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/nginx-54785cbcb8" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-54785cbcb8-zm6pb"
E0813 01:41:20.505210   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:269: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx:
(Bgeneric-resources.sh:270: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
(Bkubectl convert is DEPRECATED and will be removed in a future version.
In order to convert, kubectl apply the object to the cluster, then kubectl get at the desired version.
generic-resources.sh:274: Successful get deployment nginx {{ .apiVersion }}: apps/v1
(BSuccessful
... skipping 45 lines ...
(BI0813 01:41:21.701567   57736 shared_informer.go:240] Waiting for caches to sync for garbage collector
I0813 01:41:21.701652   57736 shared_informer.go:247] Caches are synced for garbage collector 
generic-resources.sh:285: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(BSuccessful
message:kubectl convert is DEPRECATED and will be removed in a future version.
In order to convert, kubectl apply the object to the cluster, then kubectl get at the desired version.
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:290: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(BI0813 01:41:22.186894   57736 shared_informer.go:240] Waiting for caches to sync for resource quota
I0813 01:41:22.186947   57736 shared_informer.go:247] Caches are synced for resource quota 
Successful
message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:busybox0:busybox1:
Successful
message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:299: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(Bpod/busybox0 labeled
pod/busybox1 labeled
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
generic-resources.sh:304: Successful get pods {{range.items}}{{.metadata.labels.mylabel}}:{{end}}: myvalue:myvalue:
(BSuccessful
message:pod/busybox0 labeled
pod/busybox1 labeled
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:309: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(Bpod/busybox0 patched
pod/busybox1 patched
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
generic-resources.sh:314: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: prom/busybox:prom/busybox:
(BSuccessful
message:pod/busybox0 patched
pod/busybox1 patched
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
E0813 01:41:23.318349   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:319: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(Bgeneric-resources.sh:323: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(BSuccessful
message:warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
pod "busybox0" force deleted
pod "busybox1" force deleted
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:328: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
(BE0813 01:41:24.127261   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
replicationcontroller/busybox0 created
I0813 01:41:24.281705   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/busybox0" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox0-xrfff"
replicationcontroller/busybox1 created
error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I0813 01:41:24.293355   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/busybox1" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox1-dpt65"
generic-resources.sh:332: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(Bgeneric-resources.sh:337: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(Bgeneric-resources.sh:338: Successful get rc busybox0 {{.spec.replicas}}: 1
(Bgeneric-resources.sh:339: Successful get rc busybox1 {{.spec.replicas}}: 1
(BE0813 01:41:25.283322   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0813 01:41:25.318031   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:344: Successful get hpa busybox0 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 80
(Bgeneric-resources.sh:345: Successful get hpa busybox1 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 80
(BSuccessful
message:horizontalpodautoscaler.autoscaling/busybox0 autoscaled
horizontalpodautoscaler.autoscaling/busybox1 autoscaled
error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
has:Object 'Kind' is missing
horizontalpodautoscaler.autoscaling "busybox0" deleted
horizontalpodautoscaler.autoscaling "busybox1" deleted
generic-resources.sh:353: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(Bgeneric-resources.sh:354: Successful get rc busybox0 {{.spec.replicas}}: 1
(Bgeneric-resources.sh:355: Successful get rc busybox1 {{.spec.replicas}}: 1
(Bgeneric-resources.sh:359: Successful get service busybox0 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80
(Bgeneric-resources.sh:360: Successful get service busybox1 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80
(BSuccessful
message:service/busybox0 exposed
service/busybox1 exposed
error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
has:Object 'Kind' is missing
generic-resources.sh:366: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(Bgeneric-resources.sh:367: Successful get rc busybox0 {{.spec.replicas}}: 1
(Bgeneric-resources.sh:368: Successful get rc busybox1 {{.spec.replicas}}: 1
(BI0813 01:41:27.690160   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/busybox0" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox0-mdlz5"
I0813 01:41:27.703756   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/busybox1" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox1-tzv4q"
generic-resources.sh:372: Successful get rc busybox0 {{.spec.replicas}}: 2
(Bgeneric-resources.sh:373: Successful get rc busybox1 {{.spec.replicas}}: 2
(BSuccessful
message:replicationcontroller/busybox0 scaled
replicationcontroller/busybox1 scaled
error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
has:Object 'Kind' is missing
generic-resources.sh:378: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(Bgeneric-resources.sh:382: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
(BSuccessful
message:warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
replicationcontroller "busybox0" force deleted
replicationcontroller "busybox1" force deleted
error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
has:Object 'Kind' is missing
generic-resources.sh:387: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
(Bdeployment.apps/nginx1-deployment created
I0813 01:41:29.280022   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/nginx1-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx1-deployment-758b5949b6 to 2"
deployment.apps/nginx0-deployment created
error: error validating "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I0813 01:41:29.287263   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/nginx1-deployment-758b5949b6" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx1-deployment-758b5949b6-kxwcr"
I0813 01:41:29.290437   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/nginx0-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx0-deployment-75db9cdfd9 to 2"
I0813 01:41:29.296105   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/nginx1-deployment-758b5949b6" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx1-deployment-758b5949b6-cc4hs"
I0813 01:41:29.300576   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/nginx0-deployment-75db9cdfd9" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx0-deployment-75db9cdfd9-bh84b"
I0813 01:41:29.307258   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/nginx0-deployment-75db9cdfd9" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx0-deployment-75db9cdfd9-5jmlc"
generic-resources.sh:391: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx0-deployment:nginx1-deployment:
(Bgeneric-resources.sh:392: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:k8s.gcr.io/nginx:1.7.9:
(Bgeneric-resources.sh:396: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:k8s.gcr.io/nginx:1.7.9:
(BSuccessful
message:deployment.apps/nginx1-deployment skipped rollback (current template already matches revision 1)
deployment.apps/nginx0-deployment skipped rollback (current template already matches revision 1)
error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
has:Object 'Kind' is missing
deployment.apps/nginx1-deployment paused
deployment.apps/nginx0-deployment paused
generic-resources.sh:404: Successful get deployment {{range.items}}{{.spec.paused}}:{{end}}: true:true:
(BSuccessful
message:unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
has:Object 'Kind' is missing
E0813 01:41:30.447362   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
deployment.apps/nginx1-deployment resumed
deployment.apps/nginx0-deployment resumed
generic-resources.sh:410: Successful get deployment {{range.items}}{{.spec.paused}}:{{end}}: <no value>:<no value>:
(BSuccessful
message:unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
has:Object 'Kind' is missing
... skipping 3 lines ...
1         <none>

deployment.apps/nginx0-deployment 
REVISION  CHANGE-CAUSE
1         <none>

error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
has:nginx0-deployment
Successful
message:deployment.apps/nginx1-deployment 
REVISION  CHANGE-CAUSE
1         <none>

deployment.apps/nginx0-deployment 
REVISION  CHANGE-CAUSE
1         <none>

error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
has:nginx1-deployment
Successful
message:deployment.apps/nginx1-deployment 
REVISION  CHANGE-CAUSE
1         <none>

deployment.apps/nginx0-deployment 
REVISION  CHANGE-CAUSE
1         <none>

error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
has:Object 'Kind' is missing
warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
deployment.apps "nginx1-deployment" force deleted
deployment.apps "nginx0-deployment" force deleted
error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
E0813 01:41:31.800166   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0813 01:41:32.335031   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:426: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
(Breplicationcontroller/busybox0 created
I0813 01:41:32.687774   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/busybox0" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox0-c5dj8"
replicationcontroller/busybox1 created
error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I0813 01:41:32.706592   57736 event.go:291] "Event occurred" object="namespace-1597282875-18433/busybox1" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox1-4k2pj"
generic-resources.sh:430: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
(BSuccessful
message:no rollbacker has been implemented for "ReplicationController"
no rollbacker has been implemented for "ReplicationController"
unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
... skipping 2 lines ...
message:no rollbacker has been implemented for "ReplicationController"
no rollbacker has been implemented for "ReplicationController"
unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
has:Object 'Kind' is missing
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" pausing is not supported
error: replicationcontrollers "busybox1" pausing is not supported
has:Object 'Kind' is missing
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" pausing is not supported
error: replicationcontrollers "busybox1" pausing is not supported
has:replicationcontrollers "busybox0" pausing is not supported
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" pausing is not supported
error: replicationcontrollers "busybox1" pausing is not supported
has:replicationcontrollers "busybox1" pausing is not supported
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" resuming is not supported
error: replicationcontrollers "busybox1" resuming is not supported
has:Object 'Kind' is missing
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" resuming is not supported
error: replicationcontrollers "busybox1" resuming is not supported
has:replicationcontrollers "busybox0" resuming is not supported
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" resuming is not supported
error: replicationcontrollers "busybox1" resuming is not supported
has:replicationcontrollers "busybox1" resuming is not supported
warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
replicationcontroller "busybox0" force deleted
replicationcontroller "busybox1" force deleted
error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
Recording: run_namespace_tests
Running command: run_namespace_tests

+++ Running case: test-cmd.run_namespace_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_namespace_tests
+++ [0813 01:41:34] Testing kubectl(v1:namespaces)
Successful
message:Error from server (NotFound): namespaces "my-namespace" not found
has: not found
namespace/my-namespace created (dry run)
namespace/my-namespace created (server dry run)
Successful
message:Error from server (NotFound): namespaces "my-namespace" not found
has: not found
namespace/my-namespace created
core.sh:1459: Successful get namespaces/my-namespace {{.metadata.name}}: my-namespace
(Bnamespace "my-namespace" deleted
E0813 01:41:37.782319   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I0813 01:41:40.162388   57736 horizontal.go:354] Horizontal Pod Autoscaler busybox0 has been deleted in namespace-1597282875-18433
I0813 01:41:40.166421   57736 horizontal.go:354] Horizontal Pod Autoscaler busybox1 has been deleted in namespace-1597282875-18433
namespace/my-namespace condition met
Successful
message:Error from server (NotFound): namespaces "my-namespace" not found
has: not found
namespace/my-namespace created
core.sh:1468: Successful get namespaces/my-namespace {{.metadata.name}}: my-namespace
(BSuccessful
message:warning: deleting cluster-scoped resources, not scoped to the provided namespace
namespace "kube-node-lease" deleted
... skipping 31 lines ...
namespace "namespace-1597282834-1673" deleted
namespace "namespace-1597282834-8022" deleted
namespace "namespace-1597282838-20774" deleted
namespace "namespace-1597282841-24599" deleted
namespace "namespace-1597282843-4849" deleted
namespace "namespace-1597282875-18433" deleted
Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted
Error from server (Forbidden): namespaces "kube-public" is forbidden: this namespace may not be deleted
Error from server (Forbidden): namespaces "kube-system" is forbidden: this namespace may not be deleted
has:warning: deleting cluster-scoped resources
Successful
message:warning: deleting cluster-scoped resources, not scoped to the provided namespace
namespace "kube-node-lease" deleted
namespace "my-namespace" deleted
namespace "namespace-1597282579-20953" deleted
... skipping 29 lines ...
namespace "namespace-1597282834-1673" deleted
namespace "namespace-1597282834-8022" deleted
namespace "namespace-1597282838-20774" deleted
namespace "namespace-1597282841-24599" deleted
namespace "namespace-1597282843-4849" deleted
namespace "namespace-1597282875-18433" deleted
Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted
Error from server (Forbidden): namespaces "kube-public" is forbidden: this namespace may not be deleted
Error from server (Forbidden): namespaces "kube-system" is forbidden: this namespace may not be deleted
has:namespace "my-namespace" deleted
namespace/quotas created
core.sh:1475: Successful get namespaces/quotas {{.metadata.name}}: quotas
(Bcore.sh:1476: Successful get quota --namespace=quotas {{range.items}}{{ if eq .metadata.name \"test-quota\" }}found{{end}}{{end}}:: :
(Bresourcequota/test-quota created (dry run)
resourcequota/test-quota created (server dry run)
core.sh:1480: Successful get quota --namespace=quotas {{range.items}}{{ if eq .metadata.name \"test-quota\" }}found{{end}}{{end}}:: :
(Bresourcequota/test-quota created
core.sh:1483: Successful get quota --namespace=quotas {{range.items}}{{ if eq .metadata.name \"test-quota\" }}found{{end}}{{end}}:: found:
(Bresourcequota "test-quota" deleted
I0813 01:41:43.539282   57736 resource_quota_controller.go:306] Resource quota has been deleted quotas/test-quota
namespace "quotas" deleted
E0813 01:41:45.227341   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E0813 01:41:46.530931   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
core.sh:1495: Successful get namespaces {{range.items}}{{ if eq .metadata.name \"other\" }}found{{end}}{{end}}:: :
(BI0813 01:41:49.081486   54230 client.go:360] parsed scheme: "passthrough"
I0813 01:41:49.081595   54230 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{http://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
I0813 01:41:49.081616   54230 clientconn.go:948] ClientConn switching balancer to "pick_first"
namespace/other created
core.sh:1499: Successful get namespaces/other {{.metadata.name}}: other
(Bcore.sh:1503: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: 
(Bpod/valid-pod created
core.sh:1507: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
(Bcore.sh:1509: Successful get pods -n other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
(BSuccessful
message:error: a resource cannot be retrieved by name across all namespaces
has:a resource cannot be retrieved by name across all namespaces
core.sh:1516: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
(Bwarning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
pod "valid-pod" force deleted
core.sh:1520: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: 
(Bnamespace "other" deleted
I0813 01:41:51.156281   57736 namespace_controller.go:185] Namespace has been deleted my-namespace
E0813 01:41:51.422820   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I0813 01:41:51.997953   57736 namespace_controller.go:185] Namespace has been deleted kube-node-lease
I0813 01:41:52.025229   57736 namespace_controller.go:185] Namespace has been deleted namespace-1597282585-6016
I0813 01:41:52.041281   57736 namespace_controller.go:185] Namespace has been deleted namespace-1597282580-27056
I0813 01:41:52.051932   57736 namespace_controller.go:185] Namespace has been deleted namespace-1597282579-20953
I0813 01:41:52.070402   57736 namespace_controller.go:185] Namespace has been deleted namespace-1597282621-17721
I0813 01:41:52.102278   57736 namespace_controller.go:185] Namespace has been deleted namespace-1597282614-26163
... skipping 26 lines ...
I0813 01:41:52.860130   57736 namespace_controller.go:185] Namespace has been deleted namespace-1597282827-1657
I0813 01:41:52.871746   57736 namespace_controller.go:185] Namespace has been deleted namespace-1597282841-24599
I0813 01:41:52.875951   57736 namespace_controller.go:185] Namespace has been deleted namespace-1597282838-20774
I0813 01:41:52.883234   57736 namespace_controller.go:185] Namespace has been deleted namespace-1597282843-4849
I0813 01:41:52.963493   57736 namespace_controller.go:185] Namespace has been deleted namespace-1597282875-18433
I0813 01:41:53.770841   57736 namespace_controller.go:185] Namespace has been deleted quotas
E0813 01:41:54.864606   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
+++ exit code: 0
Recording: run_secrets_test
Running command: run_secrets_test

+++ Running case: test-cmd.run_secrets_test 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
... skipping 100 lines ...
(Bconfigmap/test-configmap created
configmap/test-binary-configmap created
core.sh:51: Successful get configmap/test-configmap --namespace=test-configmaps {{.metadata.name}}: test-configmap
(Bcore.sh:52: Successful get configmap/test-binary-configmap --namespace=test-configmaps {{.metadata.name}}: test-binary-configmap
(Bconfigmap "test-configmap" deleted
configmap "test-binary-configmap" deleted
E0813 01:42:13.287143   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
namespace "test-configmaps" deleted
I0813 01:42:13.478229   57736 namespace_controller.go:185] Namespace has been deleted test-secrets
E0813 01:42:14.403944   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
+++ exit code: 0
Recording: run_client_config_tests
Running command: run_client_config_tests

+++ Running case: test-cmd.run_client_config_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_client_config_tests
+++ [0813 01:42:18] Creating namespace namespace-1597282938-18990
namespace/namespace-1597282938-18990 created
Context "test" modified.
+++ [0813 01:42:18] Testing client config
Successful
message:error: stat missing: no such file or directory
has:missing: no such file or directory
Successful
message:error: stat missing: no such file or directory
has:missing: no such file or directory
Successful
message:error: stat missing: no such file or directory
has:missing: no such file or directory
Successful
message:Error in configuration: context was not found for specified context: missing-context
has:context was not found for specified context: missing-context
Successful
message:error: no server found for cluster "missing-cluster"
has:no server found for cluster "missing-cluster"
Successful
message:error: auth info "missing-user" does not exist
has:auth info "missing-user" does not exist
Successful
message:error: error loading config file "/tmp/newconfig.yaml": no kind "Config" is registered for version "v-1" in scheme "k8s.io/client-go/tools/clientcmd/api/latest/latest.go:50"
has:error loading config file
Successful
message:error: stat missing-config: no such file or directory
has:no such file or directory
+++ exit code: 0
Recording: run_service_accounts_tests
Running command: run_service_accounts_tests

+++ Running case: test-cmd.run_service_accounts_tests 
... skipping 12 lines ...
core.sh:953: Successful get serviceaccount --namespace=test-service-accounts {{range.items}}{{ if eq .metadata.name \"test-service-account\" }}found{{end}}{{end}}:: :
(Bserviceaccount/test-service-account created
core.sh:957: Successful get serviceaccount/test-service-account --namespace=test-service-accounts {{.metadata.name}}: test-service-account
(Bserviceaccount "test-service-account" deleted
namespace "test-service-accounts" deleted
I0813 01:42:23.460431   57736 namespace_controller.go:185] Namespace has been deleted test-configmaps
E0813 01:42:25.333727   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I0813 01:42:26.782328   54230 client.go:360] parsed scheme: "passthrough"
I0813 01:42:26.782386   54230 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{http://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
I0813 01:42:26.782397   54230 clientconn.go:948] ClientConn switching balancer to "pick_first"
+++ exit code: 0
Recording: run_job_tests
Running command: run_job_tests
... skipping 22 lines ...
Labels:                        <none>
Annotations:                   <none>
Schedule:                      59 23 31 2 *
Concurrency Policy:            Allow
Suspend:                       False
Successful Job History Limit:  3
Failed Job History Limit:      1
Starting Deadline Seconds:     <unset>
Selector:                      <unset>
Parallelism:                   <unset>
Completions:                   <unset>
Pod Template:
  Labels:  <none>
... skipping 37 lines ...
Labels:         controller-uid=ba8f9c0c-09fe-4b9d-8951-587644643821
                job-name=test-job
Annotations:    cronjob.kubernetes.io/instantiate: manual
Parallelism:    1
Completions:    1
Start Time:     Thu, 13 Aug 2020 01:42:31 +0000
Pods Statuses:  1 Running / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  controller-uid=ba8f9c0c-09fe-4b9d-8951-587644643821
           job-name=test-job
  Containers:
   pi:
    Image:      k8s.gcr.io/perl
... skipping 15 lines ...
  ----    ------            ----  ----            -------
  Normal  SuccessfulCreate  1s    job-controller  Created pod: test-job-vfjn6
job.batch "test-job" deleted
cronjob.batch "pi" deleted
namespace "test-jobs" deleted
I0813 01:42:32.738987   57736 namespace_controller.go:185] Namespace has been deleted test-service-accounts
E0813 01:42:33.306532   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
+++ exit code: 0
Recording: run_create_job_tests
Running command: run_create_job_tests

+++ Running case: test-cmd.run_create_job_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
... skipping 440 lines ...
  type: ClusterIP
status:
  loadBalancer: {}
Successful
message:kubectl-create kubectl-set
has:kubectl-set
error: you must specify resources by --filename when --local is set.
Example resource specifications include:
   '-f rsrc.yaml'
   '--filename=rsrc.json'
core.sh:1020: Successful get services redis-master {{range.spec.selector}}{{.}}:{{end}}: redis:master:backend:
(Bservice/redis-master selector updated
Successful
message:Error from server (Conflict): Operation cannot be fulfilled on services "redis-master": the object has been modified; please apply your changes to the latest version and try again
has:Conflict
core.sh:1033: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:
(Bservice "redis-master" deleted
core.sh:1040: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
(Bcore.sh:1044: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
(Bservice/redis-master created
core.sh:1048: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:
(Bcore.sh:1052: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:
(Bservice/service-v1-test created
core.sh:1073: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:service-v1-test:
(Bservice/service-v1-test replaced
E0813 01:42:50.533198   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
core.sh:1080: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:service-v1-test:
(Bservice "redis-master" deleted
service "service-v1-test" deleted
core.sh:1088: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
(Bcore.sh:1092: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
(Bservice/redis-master created
... skipping 114 lines ...
 (dry run)
daemonset.apps/bind rolled back (server dry run)
apps.sh:87: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:latest:
(Bapps.sh:88: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
(Bapps.sh:89: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
(Bdaemonset.apps/bind rolled back
E0813 01:43:04.792487   57736 daemon_controller.go:320] namespace-1597282980-16389/bind failed with : error storing status for daemon set &v1.DaemonSet{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:v1.ObjectMeta{Name:"bind", GenerateName:"", Namespace:"namespace-1597282980-16389", SelfLink:"/apis/apps/v1/namespaces/namespace-1597282980-16389/daemonsets/bind", UID:"1ca50731-a35a-4032-95d0-687f1efbbd5b", ResourceVersion:"2153", Generation:3, CreationTimestamp:v1.Time{Time:time.Time{wall:0x0, ext:63732879781, loc:(*time.Location)(0x6a37c80)}}, DeletionTimestamp:(*v1.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"service":"bind"}, Annotations:map[string]string{"deprecated.daemonset.template.generation":"3", "kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"apps/v1\",\"kind\":\"DaemonSet\",\"metadata\":{\"annotations\":{\"kubernetes.io/change-cause\":\"kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true\"},\"labels\":{\"service\":\"bind\"},\"name\":\"bind\",\"namespace\":\"namespace-1597282980-16389\"},\"spec\":{\"selector\":{\"matchLabels\":{\"service\":\"bind\"}},\"template\":{\"metadata\":{\"labels\":{\"service\":\"bind\"}},\"spec\":{\"affinity\":{\"podAntiAffinity\":{\"requiredDuringSchedulingIgnoredDuringExecution\":[{\"labelSelector\":{\"matchExpressions\":[{\"key\":\"service\",\"operator\":\"In\",\"values\":[\"bind\"]}]},\"namespaces\":[],\"topologyKey\":\"kubernetes.io/hostname\"}]}},\"containers\":[{\"image\":\"k8s.gcr.io/pause:latest\",\"name\":\"kubernetes-pause\"},{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"app\"}]}},\"updateStrategy\":{\"rollingUpdate\":{\"maxUnavailable\":\"10%\"},\"type\":\"RollingUpdate\"}}}\n", "kubernetes.io/change-cause":"kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true"}, 
OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:"", ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:"kube-controller-manager", Operation:"Update", APIVersion:"apps/v1", Time:(*v1.Time)(0xc001d90fa0), FieldsType:"FieldsV1", FieldsV1:(*v1.FieldsV1)(0xc001d90fe0)}, v1.ManagedFieldsEntry{Manager:"kubectl-client-side-apply", Operation:"Update", APIVersion:"apps/v1", Time:(*v1.Time)(0xc001d91020), FieldsType:"FieldsV1", FieldsV1:(*v1.FieldsV1)(0xc001d91060)}, v1.ManagedFieldsEntry{Manager:"kubectl", Operation:"Update", APIVersion:"apps/v1", Time:(*v1.Time)(0xc001d910a0), FieldsType:"FieldsV1", FieldsV1:(*v1.FieldsV1)(0xc001d910c0)}}}, Spec:v1.DaemonSetSpec{Selector:(*v1.LabelSelector)(0xc001d910e0), Template:v1.PodTemplateSpec{ObjectMeta:v1.ObjectMeta{Name:"", GenerateName:"", Namespace:"", SelfLink:"", UID:"", ResourceVersion:"", Generation:0, CreationTimestamp:v1.Time{Time:time.Time{wall:0x0, ext:0, loc:(*time.Location)(nil)}}, DeletionTimestamp:(*v1.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"service":"bind"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:"", ManagedFields:[]v1.ManagedFieldsEntry(nil)}, Spec:v1.PodSpec{Volumes:[]v1.Volume(nil), InitContainers:[]v1.Container(nil), Containers:[]v1.Container{v1.Container{Name:"kubernetes-pause", Image:"k8s.gcr.io/pause:2.0", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]v1.ContainerPort(nil), EnvFrom:[]v1.EnvFromSource(nil), Env:[]v1.EnvVar(nil), Resources:v1.ResourceRequirements{Limits:v1.ResourceList(nil), Requests:v1.ResourceList(nil)}, VolumeMounts:[]v1.VolumeMount(nil), VolumeDevices:[]v1.VolumeDevice(nil), LivenessProbe:(*v1.Probe)(nil), ReadinessProbe:(*v1.Probe)(nil), StartupProbe:(*v1.Probe)(nil), Lifecycle:(*v1.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", TerminationMessagePolicy:"File", 
ImagePullPolicy:"IfNotPresent", SecurityContext:(*v1.SecurityContext)(nil), Stdin:false, StdinOnce:false, TTY:false}}, EphemeralContainers:[]v1.EphemeralContainer(nil), RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc002671248), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"", DeprecatedServiceAccount:"", AutomountServiceAccountToken:(*bool)(nil), NodeName:"", HostNetwork:false, HostPID:false, HostIPC:false, ShareProcessNamespace:(*bool)(nil), SecurityContext:(*v1.PodSecurityContext)(0xc0000fd1f0), ImagePullSecrets:[]v1.LocalObjectReference(nil), Hostname:"", Subdomain:"", Affinity:(*v1.Affinity)(0xc001d91100), SchedulerName:"default-scheduler", Tolerations:[]v1.Toleration(nil), HostAliases:[]v1.HostAlias(nil), PriorityClassName:"", Priority:(*int32)(nil), DNSConfig:(*v1.PodDNSConfig)(nil), ReadinessGates:[]v1.PodReadinessGate(nil), RuntimeClassName:(*string)(nil), EnableServiceLinks:(*bool)(nil), PreemptionPolicy:(*v1.PreemptionPolicy)(nil), Overhead:v1.ResourceList(nil), TopologySpreadConstraints:[]v1.TopologySpreadConstraint(nil), SetHostnameAsFQDN:(*bool)(nil)}}, UpdateStrategy:v1.DaemonSetUpdateStrategy{Type:"RollingUpdate", RollingUpdate:(*v1.RollingUpdateDaemonSet)(0xc0008bd4c8)}, MinReadySeconds:0, RevisionHistoryLimit:(*int32)(0xc00267129c)}, Status:v1.DaemonSetStatus{CurrentNumberScheduled:0, NumberMisscheduled:0, DesiredNumberScheduled:0, NumberReady:0, ObservedGeneration:2, UpdatedNumberScheduled:0, NumberAvailable:0, NumberUnavailable:0, CollisionCount:(*int32)(nil), Conditions:[]v1.DaemonSetCondition(nil)}}: Operation cannot be fulfilled on daemonsets.apps "bind": the object has been modified; please apply your changes to the latest version and try again
apps.sh:92: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
(Bapps.sh:93: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
(BSuccessful
message:error: unable to find specified revision 1000000 in history
has:unable to find specified revision
apps.sh:97: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
(Bapps.sh:98: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
(Bdaemonset.apps/bind rolled back
E0813 01:43:05.868482   57736 daemon_controller.go:320] namespace-1597282980-16389/bind failed with : error storing status for daemon set &v1.DaemonSet{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:v1.ObjectMeta{Name:"bind", GenerateName:"", Namespace:"namespace-1597282980-16389", SelfLink:"/apis/apps/v1/namespaces/namespace-1597282980-16389/daemonsets/bind", UID:"1ca50731-a35a-4032-95d0-687f1efbbd5b", ResourceVersion:"2158", Generation:4, CreationTimestamp:v1.Time{Time:time.Time{wall:0x0, ext:63732879781, loc:(*time.Location)(0x6a37c80)}}, DeletionTimestamp:(*v1.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"service":"bind"}, Annotations:map[string]string{"deprecated.daemonset.template.generation":"4", "kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"apps/v1\",\"kind\":\"DaemonSet\",\"metadata\":{\"annotations\":{\"kubernetes.io/change-cause\":\"kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true\"},\"labels\":{\"service\":\"bind\"},\"name\":\"bind\",\"namespace\":\"namespace-1597282980-16389\"},\"spec\":{\"selector\":{\"matchLabels\":{\"service\":\"bind\"}},\"template\":{\"metadata\":{\"labels\":{\"service\":\"bind\"}},\"spec\":{\"affinity\":{\"podAntiAffinity\":{\"requiredDuringSchedulingIgnoredDuringExecution\":[{\"labelSelector\":{\"matchExpressions\":[{\"key\":\"service\",\"operator\":\"In\",\"values\":[\"bind\"]}]},\"namespaces\":[],\"topologyKey\":\"kubernetes.io/hostname\"}]}},\"containers\":[{\"image\":\"k8s.gcr.io/pause:latest\",\"name\":\"kubernetes-pause\"},{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"app\"}]}},\"updateStrategy\":{\"rollingUpdate\":{\"maxUnavailable\":\"10%\"},\"type\":\"RollingUpdate\"}}}\n", "kubernetes.io/change-cause":"kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true"}, 
OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:"", ManagedFields:[]v1.ManagedFieldsEntry{v1.ManagedFieldsEntry{Manager:"kubectl-client-side-apply", Operation:"Update", APIVersion:"apps/v1", Time:(*v1.Time)(0xc000e56b60), FieldsType:"FieldsV1", FieldsV1:(*v1.FieldsV1)(0xc000e56ba0)}, v1.ManagedFieldsEntry{Manager:"kube-controller-manager", Operation:"Update", APIVersion:"apps/v1", Time:(*v1.Time)(0xc000e56be0), FieldsType:"FieldsV1", FieldsV1:(*v1.FieldsV1)(0xc000e56c20)}, v1.ManagedFieldsEntry{Manager:"kubectl", Operation:"Update", APIVersion:"apps/v1", Time:(*v1.Time)(0xc000e56c60), FieldsType:"FieldsV1", FieldsV1:(*v1.FieldsV1)(0xc000e56ca0)}}}, Spec:v1.DaemonSetSpec{Selector:(*v1.LabelSelector)(0xc000e56d20), Template:v1.PodTemplateSpec{ObjectMeta:v1.ObjectMeta{Name:"", GenerateName:"", Namespace:"", SelfLink:"", UID:"", ResourceVersion:"", Generation:0, CreationTimestamp:v1.Time{Time:time.Time{wall:0x0, ext:0, loc:(*time.Location)(nil)}}, DeletionTimestamp:(*v1.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"service":"bind"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Finalizers:[]string(nil), ClusterName:"", ManagedFields:[]v1.ManagedFieldsEntry(nil)}, Spec:v1.PodSpec{Volumes:[]v1.Volume(nil), InitContainers:[]v1.Container(nil), Containers:[]v1.Container{v1.Container{Name:"kubernetes-pause", Image:"k8s.gcr.io/pause:latest", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]v1.ContainerPort(nil), EnvFrom:[]v1.EnvFromSource(nil), Env:[]v1.EnvVar(nil), Resources:v1.ResourceRequirements{Limits:v1.ResourceList(nil), Requests:v1.ResourceList(nil)}, VolumeMounts:[]v1.VolumeMount(nil), VolumeDevices:[]v1.VolumeDevice(nil), LivenessProbe:(*v1.Probe)(nil), ReadinessProbe:(*v1.Probe)(nil), StartupProbe:(*v1.Probe)(nil), Lifecycle:(*v1.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", TerminationMessagePolicy:"File", 
ImagePullPolicy:"IfNotPresent", SecurityContext:(*v1.SecurityContext)(nil), Stdin:false, StdinOnce:false, TTY:false}, v1.Container{Name:"app", Image:"k8s.gcr.io/nginx:test-cmd", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]v1.ContainerPort(nil), EnvFrom:[]v1.EnvFromSource(nil), Env:[]v1.EnvVar(nil), Resources:v1.ResourceRequirements{Limits:v1.ResourceList(nil), Requests:v1.ResourceList(nil)}, VolumeMounts:[]v1.VolumeMount(nil), VolumeDevices:[]v1.VolumeDevice(nil), LivenessProbe:(*v1.Probe)(nil), ReadinessProbe:(*v1.Probe)(nil), StartupProbe:(*v1.Probe)(nil), Lifecycle:(*v1.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", TerminationMessagePolicy:"File", ImagePullPolicy:"IfNotPresent", SecurityContext:(*v1.SecurityContext)(nil), Stdin:false, StdinOnce:false, TTY:false}}, EphemeralContainers:[]v1.EphemeralContainer(nil), RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc0030fe358), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"", DeprecatedServiceAccount:"", AutomountServiceAccountToken:(*bool)(nil), NodeName:"", HostNetwork:false, HostPID:false, HostIPC:false, ShareProcessNamespace:(*bool)(nil), SecurityContext:(*v1.PodSecurityContext)(0xc002322000), ImagePullSecrets:[]v1.LocalObjectReference(nil), Hostname:"", Subdomain:"", Affinity:(*v1.Affinity)(0xc000e56d60), SchedulerName:"default-scheduler", Tolerations:[]v1.Toleration(nil), HostAliases:[]v1.HostAlias(nil), PriorityClassName:"", Priority:(*int32)(nil), DNSConfig:(*v1.PodDNSConfig)(nil), ReadinessGates:[]v1.PodReadinessGate(nil), RuntimeClassName:(*string)(nil), EnableServiceLinks:(*bool)(nil), PreemptionPolicy:(*v1.PreemptionPolicy)(nil), Overhead:v1.ResourceList(nil), TopologySpreadConstraints:[]v1.TopologySpreadConstraint(nil), SetHostnameAsFQDN:(*bool)(nil)}}, UpdateStrategy:v1.DaemonSetUpdateStrategy{Type:"RollingUpdate", RollingUpdate:(*v1.RollingUpdateDaemonSet)(0xc002012088)}, 
MinReadySeconds:0, RevisionHistoryLimit:(*int32)(0xc0030fe3ac)}, Status:v1.DaemonSetStatus{CurrentNumberScheduled:0, NumberMisscheduled:0, DesiredNumberScheduled:0, NumberReady:0, ObservedGeneration:3, UpdatedNumberScheduled:0, NumberAvailable:0, NumberUnavailable:0, CollisionCount:(*int32)(nil), Conditions:[]v1.DaemonSetCondition(nil)}}: Operation cannot be fulfilled on daemonsets.apps "bind": the object has been modified; please apply your changes to the latest version and try again
apps.sh:101: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:latest:
(Bapps.sh:102: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
(Bapps.sh:103: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
(Bdaemonset.apps "bind" deleted
+++ exit code: 0
Recording: run_rc_tests
... skipping 32 lines ...
Namespace:    namespace-1597282986-23623
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
Namespace:    namespace-1597282986-23623
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 18 lines ...
Namespace:    namespace-1597282986-23623
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 12 lines ...
Namespace:    namespace-1597282986-23623
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 18 lines ...
matched Pod Template:
matched Labels:
matched Selector:
matched Replicas:
matched Pods Status:
matched Volumes:
E0813 01:43:09.512992   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
matched GET_HOSTS_FROM:
Successful describe rc:
Name:         frontend
Namespace:    namespace-1597282986-23623
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
Namespace:    namespace-1597282986-23623
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
Namespace:    namespace-1597282986-23623
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 11 lines ...
Namespace:    namespace-1597282986-23623
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 15 lines ...
(Bcore.sh:1224: Successful get rc frontend {{.spec.replicas}}: 3
(Breplicationcontroller/frontend scaled
E0813 01:43:10.419839   57736 replica_set.go:201] ReplicaSet has no controller: &ReplicaSet{ObjectMeta:{frontend  namespace-1597282986-23623 /api/v1/namespaces/namespace-1597282986-23623/replicationcontrollers/frontend 1c4c7535-b10a-4f1b-9ea4-54d31f705231 2195 2 2020-08-13 01:43:08 +0000 UTC <nil> <nil> map[app:guestbook tier:frontend] map[] [] []  [{kube-controller-manager Update v1 2020-08-13 01:43:08 +0000 UTC FieldsV1 {"f:status":{"f:fullyLabeledReplicas":{},"f:observedGeneration":{},"f:replicas":{}}}} {kubectl-create Update v1 2020-08-13 01:43:08 +0000 UTC FieldsV1 {"f:metadata":{"f:labels":{".":{},"f:app":{},"f:tier":{}}},"f:spec":{"f:replicas":{},"f:selector":{".":{},"f:app":{},"f:tier":{}},"f:template":{".":{},"f:metadata":{".":{},"f:creationTimestamp":{},"f:labels":{".":{},"f:app":{},"f:tier":{}}},"f:spec":{".":{},"f:containers":{".":{},"k:{\"name\":\"php-redis\"}":{".":{},"f:env":{".":{},"k:{\"name\":\"GET_HOSTS_FROM\"}":{".":{},"f:name":{},"f:value":{}}},"f:image":{},"f:imagePullPolicy":{},"f:name":{},"f:ports":{".":{},"k:{\"containerPort\":80,\"protocol\":\"TCP\"}":{".":{},"f:containerPort":{},"f:protocol":{}}},"f:resources":{".":{},"f:requests":{".":{},"f:cpu":{},"f:memory":{}}},"f:terminationMessagePath":{},"f:terminationMessagePolicy":{}}},"f:dnsPolicy":{},"f:restartPolicy":{},"f:schedulerName":{},"f:securityContext":{},"f:terminationGracePeriodSeconds":{}}}}}}]},Spec:ReplicaSetSpec{Replicas:*2,Selector:&v1.LabelSelector{MatchLabels:map[string]string{app: guestbook,tier: frontend,},MatchExpressions:[]LabelSelectorRequirement{},},Template:{{      0 0001-01-01 00:00:00 +0000 UTC <nil> <nil> map[app:guestbook tier:frontend] map[] [] []  []} {[] [] [{php-redis gcr.io/google_samples/gb-frontend:v4 [] []  [{ 0 80 TCP }] [] [{GET_HOSTS_FROM dns nil}] {map[] map[cpu:{{100 -3} {<nil>} 100m DecimalSI} memory:{{104857600 0} {<nil>} 100Mi BinarySI}]} [] [] nil nil nil nil /dev/termination-log File IfNotPresent nil false false false}] [] Always 0xc0026b7418 <nil> 
ClusterFirst map[]   <nil>  false false false <nil> PodSecurityContext{SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,} []   nil default-scheduler [] []  <nil> nil [] <nil> <nil> <nil> map[] [] <nil>}},MinReadySeconds:0,},Status:ReplicaSetStatus{Replicas:3,FullyLabeledReplicas:3,ObservedGeneration:1,ReadyReplicas:0,AvailableReplicas:0,Conditions:[]ReplicaSetCondition{},},}
I0813 01:43:10.426368   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/frontend" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulDelete" message="Deleted pod: frontend-nqqcq"
core.sh:1228: Successful get rc frontend {{.spec.replicas}}: 2
(Bcore.sh:1232: Successful get rc frontend {{.spec.replicas}}: 2
(Berror: Expected replicas to be 3, was 2
core.sh:1236: Successful get rc frontend {{.spec.replicas}}: 2
(Bcore.sh:1240: Successful get rc frontend {{.spec.replicas}}: 2
(Breplicationcontroller/frontend scaled
I0813 01:43:11.506474   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/frontend" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-wwpc4"
core.sh:1244: Successful get rc frontend {{.spec.replicas}}: 3
(Bcore.sh:1248: Successful get rc frontend {{.spec.replicas}}: 3
... skipping 31 lines ...
(Bdeployment.apps "nginx-deployment" deleted
Successful
message:service/expose-test-deployment exposed
has:service/expose-test-deployment exposed
service "expose-test-deployment" deleted
Successful
message:error: couldn't retrieve selectors via --selector flag or introspection: invalid deployment: no selectors, therefore cannot be exposed
See 'kubectl expose -h' for help and examples
has:invalid deployment: no selectors
deployment.apps/nginx-deployment created
I0813 01:43:16.377519   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/nginx-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-76b5cd66f5 to 3"
I0813 01:43:16.381650   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/nginx-deployment-76b5cd66f5" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-76b5cd66f5-tf7hq"
I0813 01:43:16.387061   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/nginx-deployment-76b5cd66f5" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-76b5cd66f5-vlxd5"
I0813 01:43:16.387192   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/nginx-deployment-76b5cd66f5" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-76b5cd66f5-qb55g"
core.sh:1291: Successful get deployment nginx-deployment {{.spec.replicas}}: 3
(Bservice/nginx-deployment exposed
E0813 01:43:16.959698   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
core.sh:1295: Successful get service nginx-deployment {{(index .spec.ports 0).port}}: 80
(Bdeployment.apps "nginx-deployment" deleted
service "nginx-deployment" deleted
I0813 01:43:17.748450   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/frontend" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-7q6d5"
replicationcontroller/frontend created
I0813 01:43:17.753758   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/frontend" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-ncn9z"
... skipping 14 lines ...
service "frontend" deleted
service "frontend-2" deleted
service "frontend-3" deleted
service "frontend-4" deleted
service "frontend-5" deleted
Successful
message:error: cannot expose a Node
has:cannot expose
Successful
message:The Service "invalid-large-service-name-that-has-more-than-sixty-three-characters" is invalid: metadata.name: Invalid value: "invalid-large-service-name-that-has-more-than-sixty-three-characters": must be no more than 63 characters
has:metadata.name: Invalid value
Successful
message:service/kubernetes-serve-hostname-testing-sixty-three-characters-in-len exposed
... skipping 30 lines ...
(Bhorizontalpodautoscaler.autoscaling/frontend autoscaled
core.sh:1391: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 70
(Bhorizontalpodautoscaler.autoscaling "frontend" deleted
horizontalpodautoscaler.autoscaling/frontend autoscaled
core.sh:1395: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 2 3 80
(Bhorizontalpodautoscaler.autoscaling "frontend" deleted
Error: required flag(s) "max" not set
replicationcontroller "frontend" deleted
core.sh:1404: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
(BapiVersion: apps/v1
kind: Deployment
metadata:
  creationTimestamp: null
... skipping 24 lines ...
          limits:
            cpu: 300m
          requests:
            cpu: 300m
      terminationGracePeriodSeconds: 0
status: {}
Error from server (NotFound): deployments.apps "nginx-deployment-resources" not found
deployment.apps/nginx-deployment-resources created
I0813 01:43:28.231861   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/nginx-deployment-resources" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-resources-748ddcb48b to 3"
I0813 01:43:28.244372   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/nginx-deployment-resources-748ddcb48b" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-748ddcb48b-sqgds"
I0813 01:43:28.255170   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/nginx-deployment-resources-748ddcb48b" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-748ddcb48b-ww4jc"
I0813 01:43:28.260512   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/nginx-deployment-resources-748ddcb48b" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-748ddcb48b-jz797"
core.sh:1410: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx-deployment-resources:
(Bcore.sh:1411: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
(Bcore.sh:1412: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/perl:
(Bdeployment.apps/nginx-deployment-resources resource requirements updated
I0813 01:43:30.123940   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/nginx-deployment-resources" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-resources-7bfb7d56b6 to 1"
I0813 01:43:30.140100   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/nginx-deployment-resources-7bfb7d56b6" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-7bfb7d56b6-vlffp"
E0813 01:43:30.328557   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
core.sh:1415: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).resources.limits.cpu}}:{{end}}: 100m:
(Bcore.sh:1416: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.limits.cpu}}:{{end}}: 100m:
(Berror: unable to find container named redis
deployment.apps/nginx-deployment-resources resource requirements updated
I0813 01:43:31.808011   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/nginx-deployment-resources" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled down replica set nginx-deployment-resources-748ddcb48b to 2"
I0813 01:43:31.822037   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/nginx-deployment-resources-748ddcb48b" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulDelete" message="Deleted pod: nginx-deployment-resources-748ddcb48b-sqgds"
I0813 01:43:31.830951   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/nginx-deployment-resources" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-resources-75dbcccf44 to 1"
I0813 01:43:31.848599   57736 event.go:291] "Event occurred" object="namespace-1597282986-23623/nginx-deployment-resources-75dbcccf44" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-75dbcccf44-sznzm"
core.sh:1421: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).resources.limits.cpu}}:{{end}}: 200m:
... skipping 387 lines ...
    status: "True"
    type: Progressing
  observedGeneration: 4
  replicas: 4
  unavailableReplicas: 4
  updatedReplicas: 1
error: you must specify resources by --filename when --local is set.
Example resource specifications include:
   '-f rsrc.yaml'
   '--filename=rsrc.json'
core.sh:1432: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).resources.limits.cpu}}:{{end}}: 200m:
(Bcore.sh:1433: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.limits.cpu}}:{{end}}: 300m:
(BE0813 01:43:37.967178   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
core.sh:1434: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.requests.cpu}}:{{end}}: 300m:
(Bdeployment.apps "nginx-deployment-resources" deleted
+++ exit code: 0
Recording: run_deployment_tests
Running command: run_deployment_tests

... skipping 45 lines ...
                pod-template-hash=69dd6dcd84
Annotations:    deployment.kubernetes.io/desired-replicas: 1
                deployment.kubernetes.io/max-replicas: 2
                deployment.kubernetes.io/revision: 1
Controlled By:  Deployment/test-nginx-apps
Replicas:       1 current / 1 desired
Pods Status:    0 Running / 1 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=test-nginx-apps
           pod-template-hash=69dd6dcd84
  Containers:
   nginx:
    Image:        k8s.gcr.io/nginx:test-cmd
... skipping 104 lines ...
apps.sh:304: Successful get deployment.apps {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
(B    Image:	k8s.gcr.io/nginx:test-cmd
deployment.apps/nginx rolled back (server dry run)
apps.sh:308: Successful get deployment.apps {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
(Bdeployment.apps/nginx rolled back
apps.sh:312: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
(Berror: unable to find specified revision 1000000 in history
apps.sh:315: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
(Bdeployment.apps/nginx rolled back
apps.sh:319: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
(Bdeployment.apps/nginx paused
error: you cannot rollback a paused deployment; resume it first with 'kubectl rollout resume deployment/nginx' and try again
error: deployments.apps "nginx" can't restart paused deployment (run rollout resume first)
deployment.apps/nginx resumed
deployment.apps/nginx rolled back
    deployment.kubernetes.io/revision-history: 1,3
error: desired revision (3) is different from the running revision (5)
deployment.apps/nginx restarted
I0813 01:43:59.070488   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled down replica set nginx-54785cbcb8 to 2"
I0813 01:43:59.079115   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx-54785cbcb8" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulDelete" message="Deleted pod: nginx-54785cbcb8-pj5sd"
I0813 01:43:59.081610   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-6975dcfd85 to 1"
I0813 01:43:59.091262   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx-6975dcfd85" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-6975dcfd85-zr8ff"
Successful
... skipping 149 lines ...
(Bapps.sh:363: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/perl:
(Bdeployment.apps/nginx-deployment image updated
I0813 01:44:03.633998   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-6dd48b9849 to 1"
I0813 01:44:03.641657   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx-deployment-6dd48b9849" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-6dd48b9849-bq4d6"
apps.sh:366: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
(Bapps.sh:367: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/perl:
(Berror: unable to find container named "redis"
deployment.apps/nginx-deployment image updated
apps.sh:372: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
(Bapps.sh:373: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/perl:
(Bdeployment.apps/nginx-deployment image updated
apps.sh:376: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
(Bapps.sh:377: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/perl:
... skipping 14 lines ...
(Bdeployment.apps/nginx-deployment created
I0813 01:44:07.461265   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-b8c4df945 to 3"
I0813 01:44:07.466697   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx-deployment-b8c4df945" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-b8c4df945-j2nfw"
I0813 01:44:07.470788   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx-deployment-b8c4df945" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-b8c4df945-5sbwq"
I0813 01:44:07.473907   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx-deployment-b8c4df945" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-b8c4df945-pdvg8"
configmap/test-set-env-config created
E0813 01:44:08.181415   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
secret/test-set-env-secret created
apps.sh:400: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx-deployment:
(Bapps.sh:402: Successful get configmaps/test-set-env-config {{.metadata.name}}: test-set-env-config
(Bapps.sh:403: Successful get secret {{range.items}}{{.metadata.name}}:{{end}}: test-set-env-secret:
(Bdeployment.apps/nginx-deployment env updated
I0813 01:44:08.914072   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-59b7fccd97 to 1"
I0813 01:44:08.921725   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx-deployment-59b7fccd97" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-59b7fccd97-qc6j7"
apps.sh:407: Successful get deploy nginx-deployment {{ (index (index .spec.template.spec.containers 0).env 0).name}}: KEY_2
(Bapps.sh:409: Successful get deploy nginx-deployment {{ len (index .spec.template.spec.containers 0).env }}: 1
(Bdeployment.apps/nginx-deployment env updated (dry run)
E0813 01:44:09.478803   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
deployment.apps/nginx-deployment env updated (server dry run)
apps.sh:413: Successful get deploy nginx-deployment {{ len (index .spec.template.spec.containers 0).env }}: 1
(Bdeployment.apps/nginx-deployment env updated
I0813 01:44:10.195494   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled down replica set nginx-deployment-b8c4df945 to 2"
I0813 01:44:10.206724   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx-deployment-b8c4df945" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulDelete" message="Deleted pod: nginx-deployment-b8c4df945-j2nfw"
I0813 01:44:10.207614   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-7f789d7c5f to 1"
... skipping 22 lines ...
deployment.apps "nginx-deployment" deleted
I0813 01:44:11.678229   54230 client.go:360] parsed scheme: "passthrough"
I0813 01:44:11.678284   54230 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{http://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
I0813 01:44:11.678297   54230 clientconn.go:948] ClientConn switching balancer to "pick_first"
I0813 01:44:11.699453   57736 event.go:291] "Event occurred" object="namespace-1597283019-14649/nginx-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled down replica set nginx-deployment-68d657fb6 to 0"
configmap "test-set-env-config" deleted
E0813 01:44:11.889228   57736 replica_set.go:532] sync "namespace-1597283019-14649/nginx-deployment-59b7fccd97" failed with replicasets.apps "nginx-deployment-59b7fccd97" not found
E0813 01:44:11.989644   57736 replica_set.go:532] sync "namespace-1597283019-14649/nginx-deployment-57ddd474c4" failed with Operation cannot be fulfilled on replicasets.apps "nginx-deployment-57ddd474c4": StorageError: invalid object, Code: 4, Key: /registry/replicasets/namespace-1597283019-14649/nginx-deployment-57ddd474c4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d9076ed3-cbdb-4ed1-9cfa-ee59ba23b245, UID in object meta: 
E0813 01:44:12.038941   57736 replica_set.go:532] sync "namespace-1597283019-14649/nginx-deployment-5fbc8fbcbf" failed with replicasets.apps "nginx-deployment-5fbc8fbcbf" not found
secret "test-set-env-secret" deleted
+++ exit code: 0
E0813 01:44:12.139559   57736 replica_set.go:532] sync "namespace-1597283019-14649/nginx-deployment-5f8c874568" failed with replicasets.apps "nginx-deployment-5f8c874568" not found
E0813 01:44:12.189349   57736 replica_set.go:532] sync "namespace-1597283019-14649/nginx-deployment-7584fc66fd" failed with replicasets.apps "nginx-deployment-7584fc66fd" not found
Recording: run_rs_tests
Running command: run_rs_tests

+++ Running case: test-cmd.run_rs_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_rs_tests
... skipping 21 lines ...
(Bapps.sh:560: Successful get pods -l "tier=frontend" {{range.items}}{{(index .spec.containers 0).name}}:{{end}}: php-redis:php-redis:php-redis:
(Bpod "frontend-gg6mf" deleted
pod "frontend-pxqkh" deleted
pod "frontend-wqwn7" deleted
apps.sh:563: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(Bapps.sh:567: Successful get rs {{range.items}}{{.metadata.name}}:{{end}}: 
(BE0813 01:44:15.850046   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
replicaset.apps/frontend created
I0813 01:44:15.855912   57736 event.go:291] "Event occurred" object="namespace-1597283052-8427/frontend" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-25wbg"
I0813 01:44:15.859129   57736 event.go:291] "Event occurred" object="namespace-1597283052-8427/frontend" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-2xdc8"
I0813 01:44:15.862402   57736 event.go:291] "Event occurred" object="namespace-1597283052-8427/frontend" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-n9tw6"
apps.sh:571: Successful get rs {{range.items}}{{.metadata.name}}:{{end}}: frontend:
(Bmatched Name:
... skipping 8 lines ...
Namespace:    namespace-1597283052-8427
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 9 lines ...
Events:
  Type    Reason            Age   From                   Message
  ----    ------            ----  ----                   -------
  Normal  SuccessfulCreate  1s    replicaset-controller  Created pod: frontend-25wbg
  Normal  SuccessfulCreate  1s    replicaset-controller  Created pod: frontend-2xdc8
  Normal  SuccessfulCreate  1s    replicaset-controller  Created pod: frontend-n9tw6
(BE0813 01:44:16.482648   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
apps.sh:575: Successful describe
Name:         frontend
Namespace:    namespace-1597283052-8427
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 18 lines ...
Namespace:    namespace-1597283052-8427
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 12 lines ...
Namespace:    namespace-1597283052-8427
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 25 lines ...
Namespace:    namespace-1597283052-8427
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 17 lines ...
Namespace:    namespace-1597283052-8427
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 17 lines ...
Namespace:    namespace-1597283052-8427
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 11 lines ...
Namespace:    namespace-1597283052-8427
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 216 lines ...
horizontalpodautoscaler.autoscaling/frontend autoscaled
apps.sh:705: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 2 3 80
(BSuccessful
message:kubectl-autoscale
has:kubectl-autoscale
horizontalpodautoscaler.autoscaling "frontend" deleted
Error: required flag(s) "max" not set
replicaset.apps "frontend" deleted
+++ exit code: 0
Recording: run_stateful_set_tests
Running command: run_stateful_set_tests

+++ Running case: test-cmd.run_stateful_set_tests 
... skipping 61 lines ...
(Bapps.sh:465: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/pause:2.0:
(Bapps.sh:466: Successful get statefulset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
(Bstatefulset.apps/nginx rolled back
apps.sh:469: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx-slim:0.7:
(Bapps.sh:470: Successful get statefulset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
(BSuccessful
message:error: unable to find specified revision 1000000 in history
has:unable to find specified revision
apps.sh:474: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx-slim:0.7:
(Bapps.sh:475: Successful get statefulset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
(Bstatefulset.apps/nginx rolled back
apps.sh:478: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx-slim:0.8:
(Bapps.sh:479: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/pause:2.0:
... skipping 58 lines ...
Name:         mock
Namespace:    namespace-1597283082-5790
Selector:     app=mock
Labels:       app=mock
Annotations:  <none>
Replicas:     1 current / 1 desired
Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=mock
  Containers:
   mock-container:
    Image:        k8s.gcr.io/pause:2.0
    Port:         9949/TCP
... skipping 57 lines ...
Name:         mock
Namespace:    namespace-1597283082-5790
Selector:     app=mock
Labels:       app=mock
Annotations:  <none>
Replicas:     1 current / 1 desired
Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=mock
  Containers:
   mock-container:
    Image:        k8s.gcr.io/pause:2.0
    Port:         9949/TCP
... skipping 59 lines ...
Name:         mock
Namespace:    namespace-1597283082-5790
Selector:     app=mock
Labels:       app=mock
Annotations:  <none>
Replicas:     1 current / 1 desired
Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=mock
  Containers:
   mock-container:
    Image:        k8s.gcr.io/pause:2.0
    Port:         9949/TCP
... skipping 17 lines ...
generic-resources.sh:114: Successful get services mock {{.metadata.labels.status}}: edited
(Bgeneric-resources.sh:120: Successful get rc mock {{.metadata.labels.status}}: edited
(Bservice/mock labeled
replicationcontroller/mock labeled
generic-resources.sh:134: Successful get services mock {{.metadata.labels.labeled}}: true
(Bgeneric-resources.sh:140: Successful get rc mock {{.metadata.labels.labeled}}: true
(BE0813 01:44:55.187215   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
service/mock annotated
replicationcontroller/mock annotated
generic-resources.sh:153: Successful get services mock {{.metadata.annotations.annotated}}: true
(Bgeneric-resources.sh:159: Successful get rc mock {{.metadata.annotations.annotated}}: true
(Bservice "mock" deleted
replicationcontroller "mock" deleted
... skipping 12 lines ...
Namespace:    namespace-1597283082-5790
Selector:     app=mock
Labels:       app=mock
              status=replaced
Annotations:  <none>
Replicas:     1 current / 1 desired
Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=mock
  Containers:
   mock-container:
    Image:        k8s.gcr.io/pause:2.0
    Port:         9949/TCP
... skipping 11 lines ...
Namespace:    namespace-1597283082-5790
Selector:     app=mock2
Labels:       app=mock2
              status=replaced
Annotations:  <none>
Replicas:     1 current / 1 desired
Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=mock2
  Containers:
   mock-container:
    Image:        k8s.gcr.io/pause:2.0
    Port:         9949/TCP
... skipping 84 lines ...
service "mock2" deleted
generic-resources.sh:173: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: 
(Bgeneric-resources.sh:174: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
(Bservice/mock created
replicationcontroller/mock created
I0813 01:45:04.873494   57736 event.go:291] "Event occurred" object="namespace-1597283082-5790/mock" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: mock-86q7c"
E0813 01:45:05.022239   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:180: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: mock:
(Bgeneric-resources.sh:181: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: mock:
(Bservice "mock" deleted
replicationcontroller "mock" deleted
generic-resources.sh:187: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: 
(Bgeneric-resources.sh:188: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
... skipping 7 lines ...
+++ [0813 01:45:06] Creating namespace namespace-1597283106-9243
namespace/namespace-1597283106-9243 created
Context "test" modified.
+++ [0813 01:45:06] Testing persistent volumes
storage.sh:30: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: 
(Bpersistentvolume/pv0001 created
E0813 01:45:06.842310   57736 pv_protection_controller.go:118] PV pv0001 failed with : Operation cannot be fulfilled on persistentvolumes "pv0001": the object has been modified; please apply your changes to the latest version and try again
E0813 01:45:06.946310   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
storage.sh:33: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0001:
(Bpersistentvolume "pv0001" deleted
persistentvolume/pv0002 created
storage.sh:36: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0002:
(Bpersistentvolume "pv0002" deleted
persistentvolume/pv0003 created
E0813 01:45:08.287367   57736 pv_protection_controller.go:118] PV pv0003 failed with : Operation cannot be fulfilled on persistentvolumes "pv0003": the object has been modified; please apply your changes to the latest version and try again
storage.sh:39: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0003:
(Bpersistentvolume "pv0003" deleted
storage.sh:42: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: 
(Bpersistentvolume/pv0001 created
storage.sh:45: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0001:
(BSuccessful
... skipping 14 lines ...
+++ command: run_persistent_volume_claims_tests
+++ [0813 01:45:09] Creating namespace namespace-1597283109-19691
namespace/namespace-1597283109-19691 created
Context "test" modified.
+++ [0813 01:45:10] Testing persistent volumes claims
storage.sh:64: Successful get pvc {{range.items}}{{.metadata.name}}:{{end}}: 
(BE0813 01:45:10.547915   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I0813 01:45:10.642004   57736 event.go:291] "Event occurred" object="namespace-1597283109-19691/myclaim-1" kind="PersistentVolumeClaim" apiVersion="v1" type="Normal" reason="FailedBinding" message="no persistent volumes available for this claim and no storage class is set"
persistentvolumeclaim/myclaim-1 created
I0813 01:45:10.646512   57736 event.go:291] "Event occurred" object="namespace-1597283109-19691/myclaim-1" kind="PersistentVolumeClaim" apiVersion="v1" type="Normal" reason="FailedBinding" message="no persistent volumes available for this claim and no storage class is set"
storage.sh:67: Successful get pvc {{range.items}}{{.metadata.name}}:{{end}}: myclaim-1:
(Bpersistentvolumeclaim "myclaim-1" deleted
I0813 01:45:10.993816   57736 event.go:291] "Event occurred" object="namespace-1597283109-19691/myclaim-1" kind="PersistentVolumeClaim" apiVersion="v1" type="Normal" reason="FailedBinding" message="no persistent volumes available for this claim and no storage class is set"
... skipping 45 lines ...
Roles:              <none>
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
CreationTimestamp:  Thu, 13 Aug 2020 01:36:16 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 30 lines ...
Roles:              <none>
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
CreationTimestamp:  Thu, 13 Aug 2020 01:36:16 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 31 lines ...
Roles:              <none>
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
CreationTimestamp:  Thu, 13 Aug 2020 01:36:16 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 30 lines ...
Roles:              <none>
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
CreationTimestamp:  Thu, 13 Aug 2020 01:36:16 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 38 lines ...
Roles:              <none>
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
CreationTimestamp:  Thu, 13 Aug 2020 01:36:16 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 30 lines ...
Roles:              <none>
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
CreationTimestamp:  Thu, 13 Aug 2020 01:36:16 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 30 lines ...
Roles:              <none>
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
CreationTimestamp:  Thu, 13 Aug 2020 01:36:16 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 29 lines ...
Roles:              <none>
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
CreationTimestamp:  Thu, 13 Aug 2020 01:36:16 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Thu, 13 Aug 2020 01:36:16 +0000   Thu, 13 Aug 2020 01:37:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 138 lines ...
yes
has:the server doesn't have a resource type
Successful
message:yes
has:yes
Successful
message:error: --subresource can not be used with NonResourceURL
has:subresource can not be used with NonResourceURL
Successful
Successful
message:yes
0
has:0
... skipping 59 lines ...
		{Verbs:[get list watch] APIGroups:[] Resources:[configmaps] ResourceNames:[] NonResourceURLs:[]}
legacy-script.sh:840: Successful get rolebindings -n some-other-random -l test-cmd=auth {{range.items}}{{.metadata.name}}:{{end}}: testing-RB:
(Blegacy-script.sh:841: Successful get roles -n some-other-random -l test-cmd=auth {{range.items}}{{.metadata.name}}:{{end}}: testing-R:
(Blegacy-script.sh:842: Successful get clusterrolebindings -l test-cmd=auth {{range.items}}{{.metadata.name}}:{{end}}: testing-CRB:
(Blegacy-script.sh:843: Successful get clusterroles -l test-cmd=auth {{range.items}}{{.metadata.name}}:{{end}}: testing-CR:
(BSuccessful
message:error: only rbac.authorization.k8s.io/v1 is supported: not *v1beta1.ClusterRole
has:only rbac.authorization.k8s.io/v1 is supported
rolebinding.rbac.authorization.k8s.io "testing-RB" deleted
role.rbac.authorization.k8s.io "testing-R" deleted
warning: deleting cluster-scoped resources, not scoped to the provided namespace
clusterrole.rbac.authorization.k8s.io "testing-CR" deleted
clusterrolebinding.rbac.authorization.k8s.io "testing-CRB" deleted
... skipping 390 lines ...
namespace-1597283106-9243    default   0         29s
namespace-1597283109-19691   default   0         26s
namespace-1597283123-25881   default   0         12s
some-other-random            default   0         13s
has:all-ns-test-2
namespace "all-ns-test-1" deleted
E0813 01:45:38.857905   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
namespace "all-ns-test-2" deleted
I0813 01:45:45.748279   57736 namespace_controller.go:185] Namespace has been deleted all-ns-test-1
get.sh:376: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
(Bwarning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
pod "valid-pod" force deleted
get.sh:380: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
... skipping 228 lines ...
Successful
message:kubernetes:
has:kubernetes:
Successful
message:valid-pod:
has:valid-pod:
E0813 01:45:54.320392   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
Successful
message:foo:
has:foo:
Successful
message:foo:
has:foo:
... skipping 282 lines ...
        "selfLink": ""
    }
}
certificate.sh:40: Successful get csr/foo {{range.status.conditions}}{{.type}}{{end}}: Approved
(BWarning: certificates.k8s.io/v1beta1 CertificateSigningRequest is deprecated in v1.19+, unavailable in v1.22+; use certificates.k8s.io/v1 CertificateSigningRequest
certificatesigningrequest.certificates.k8s.io "foo" deleted
E0813 01:46:00.385890   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
certificate.sh:42: Successful get csr {{range.items}}{{.metadata.name}}{{end}}: 
(BWarning: certificates.k8s.io/v1beta1 CertificateSigningRequest is deprecated in v1.19+, unavailable in v1.22+; use certificates.k8s.io/v1 CertificateSigningRequest
certificatesigningrequest.certificates.k8s.io/foo created
certificate.sh:46: Successful get csr/foo {{range.status.conditions}}{{.type}}{{end}}: 
(Bcertificatesigningrequest.certificates.k8s.io/foo denied
{
... skipping 84 lines ...
certificate.sh:49: Successful get csr/foo {{range.status.conditions}}{{.type}}{{end}}: Denied
(BWarning: certificates.k8s.io/v1beta1 CertificateSigningRequest is deprecated in v1.19+, unavailable in v1.22+; use certificates.k8s.io/v1 CertificateSigningRequest
certificatesigningrequest.certificates.k8s.io "foo" deleted
certificate.sh:51: Successful get csr {{range.items}}{{.metadata.name}}{{end}}: 
(BWarning: certificates.k8s.io/v1beta1 CertificateSigningRequest is deprecated in v1.19+, unavailable in v1.22+; use certificates.k8s.io/v1 CertificateSigningRequest
certificatesigningrequest.certificates.k8s.io/foo created
E0813 01:46:02.666950   57736 reflector.go:127] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
certificate.sh:54: Successful get csr/foo {{range.status.conditions}}{{.type}}{{end}}: 
(BWarning: certificates.k8s.io/v1beta1 CertificateSigningRequest is deprecated in v1.19+, unavailable in v1.22+; use certificates.k8s.io/v1 CertificateSigningRequest
certificatesigningrequest.certificates.k8s.io/foo denied
{
    "apiVersion": "v1",
    "items": [
... skipping 149 lines ...
message:node/127.0.0.1 already uncordoned (server dry run)
has:already uncordoned
node-management.sh:145: Successful get nodes 127.0.0.1 {{.spec.unschedulable}}: <no value>
(Bnode/127.0.0.1 labeled
node-management.sh:150: Successful get nodes 127.0.0.1 {{.metadata.labels.test}}: label
(BSuccessful
message:error: cannot specify both a node name and a --selector option
See 'kubectl drain -h' for help and examples
has:cannot specify both a node name
Successful
message:error: USAGE: cordon NODE [flags]
See 'kubectl cordon -h' for help and examples
has:error\: USAGE\: cordon NODE
node/127.0.0.1 already uncordoned
Successful
message:error: You must provide one or more resources by argument or filename.
Example resource specifications include:
   '-f rsrc.yaml'
   '--filename=rsrc.json'
   '<resource> <name>'
   '<resource>'
has:must provide one or more resources
... skipping 14 lines ...
+++ [0813 01:46:14] Testing kubectl plugins
Successful
message:The following compatible plugins are available:

test/fixtures/pkg/kubectl/plugins/version/kubectl-version
  - warning: kubectl-version overwrites existing command: "kubectl version"
error: one plugin warning was found
has:kubectl-version overwrites existing command: "kubectl version"
Successful
message:The following compatible plugins are available:

test/fixtures/pkg/kubectl/plugins/kubectl-foo
test/fixtures/pkg/kubectl/plugins/foo/kubectl-foo
  - warning: test/fixtures/pkg/kubectl/plugins/foo/kubectl-foo is overshadowed by a similarly named plugin: test/fixtures/pkg/kubectl/plugins/kubectl-foo
error: one plugin warning was found
has:test/fixtures/pkg/kubectl/plugins/foo/kubectl-foo is overshadowed by a similarly named plugin
Successful
message:The following compatible plugins are available:

test/fixtures/pkg/kubectl/plugins/kubectl-foo
has:plugins are available
Successful
message:Unable read directory "test/fixtures/pkg/kubectl/plugins/empty" from your PATH: open test/fixtures/pkg/kubectl/plugins/empty: no such file or directory. Skipping...
error: unable to find any kubectl plugins in your PATH
has:unable to find any kubectl plugins in your PATH
Successful
message:I am plugin foo
has:plugin foo
Successful
message:I am plugin bar called with args test/fixtures/pkg/kubectl/plugins/bar/kubectl-bar arg1
... skipping 10 lines ...

+++ Running case: test-cmd.run_impersonation_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_impersonation_tests
+++ [0813 01:46:15] Testing impersonation
Successful
message:error: requesting groups or user-extra for  without impersonating a user
has:without impersonating a user
Warning: certificates.k8s.io/v1beta1 CertificateSigningRequest is deprecated in v1.19+, unavailable in v1.22+; use certificates.k8s.io/v1 CertificateSigningRequest
certificatesigningrequest.certificates.k8s.io/foo created
authorization.sh:68: Successful get csr/foo {{.spec.username}}: user1
(Bauthorization.sh:69: Successful get csr/foo {{range .spec.groups}}{{.}}{{end}}: system:authenticated
(BWarning: certificates.k8s.io/v1beta1 CertificateSigningRequest is deprecated in v1.19+, unavailable in v1.22+; use certificates.k8s.io/v1 CertificateSigningRequest
... skipping 64 lines ...
I0813 01:46:21.315356   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.315370   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.315492   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.315498   54230 secure_serving.go:241] Stopped listening on 127.0.0.1:6443
I0813 01:46:21.315529   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.315602   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.315611   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.315696   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.315739   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.315771   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.315784   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.315809   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.315869   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.315904   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.315996   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316026   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316027   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.316046   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.316102   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316122   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316148   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316150   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.316159   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.316212   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316250   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.316268   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.316279   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316279   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316345   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316366   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316372   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316419   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.316433   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.316445   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316546   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316548   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316591   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316669   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316710   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.316722   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.316730   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.317222   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.317277   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.317308   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.317326   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.317352   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.317390   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.317416   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.317439   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.317476   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.317531   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.317536   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.317571   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.317608   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.317623   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.317632   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.317646   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.317692   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.317721   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.317748   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.317548   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.317828   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.317854   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.317858   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.317872   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.317919   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.317951   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.317957   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.317981   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.318017   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.318042   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.318050   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.317982   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.318129   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.318140   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.318140   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.318198   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.318215   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.318249   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.318261   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.318318   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.318326   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.318340   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.318382   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.318400   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.318429   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.318445   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.318513   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.318524   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
I0813 01:46:21.318530   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.318586   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.318613   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.318616   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.318648   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.318685   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.318719   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.318754   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.318806   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.318818   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.318884   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.318890   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.318925   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.318953   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.318995   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.319071   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.319124   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.319136   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.319143   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.319199   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.319209   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.319258   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.319282   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.319363   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.319401   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.319429   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
I0813 01:46:21.319430   54230 clientconn.go:897] blockingPicker: the picked transport is not ready, loop back to repick
W0813 01:46:21.319489   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.319489   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.319570   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.319575   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.319585   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.319689   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.319724   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.320640   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.320963   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.321020   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.321020   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.320719   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.321078   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.321092   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.320825   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.320866   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.320899   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:21.320927   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
junit report dir: /logs/artifacts
+++ [0813 01:46:21] Clean up complete
+ make test-integration
W0813 01:46:22.316093   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.316101   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.316191   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.316306   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.316376   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.316511   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.316627   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.317524   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.317626   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.317690   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.317748   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.317787   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.317879   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.317957   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.317963   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.317986   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318045   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318033   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318066   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318130   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318169   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318421   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318453   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318453   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318469   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318456   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318589   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318625   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318651   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318656   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318705   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318767   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318821   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318867   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318877   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318968   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.318990   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319006   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319030   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319072   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319131   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319139   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319256   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319273   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319296   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319320   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319356   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319390   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319392   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319411   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319471   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319496   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319543   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319704   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319845   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319915   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.319958   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.320026   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.320068   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.320091   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.320111   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.321510   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.321541   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.321568   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.321614   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.321665   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.321667   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.321709   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.321726   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.321733   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.321731   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:22.321949   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.612310   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.616557   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.634704   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.650212   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.657187   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.657358   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.664628   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.673278   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.681221   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.681436   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.695543   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.697734   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.705895   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.710622   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.716336   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.732037   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.733753   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.736706   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.741365   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.750937   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.757514   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.763108   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.767946   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.789849   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.793770   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.795075   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.810984   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.817730   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.818290   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.823217   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.894879   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.907636   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.919548   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.921416   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.922543   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.925729   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.931715   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.933845   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.945769   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.948568   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.957304   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.963694   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.974135   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.975314   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:23.998351   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.009318   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.040552   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.047325   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.060625   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.061597   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.062563   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.071181   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.072391   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.082921   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.112292   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.116945   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.124249   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.125572   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.134244   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.135551   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.136714   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.143520   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.151367   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.177241   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.195178   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.197799   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.201405   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.202442   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.214214   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.217925   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.220628   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:24.233962   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:25.782007   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:25.824139   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:25.966947   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.025401   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.028082   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.051817   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.081516   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.112435   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.112437   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.134689   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.158557   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.159153   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.166169   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.200596   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.218802   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.231304   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.231442   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.242898   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.262250   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.280185   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.285418   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.315355   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.381088   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.402111   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.407537   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.409797   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.418980   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.446661   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.457748   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.480332   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.487000   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.493493   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.494536   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.507745   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.543012   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.544141   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.552730   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.565428   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.566744   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.567216   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.569199   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.569277   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.573570   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.579930   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.583961   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.587056   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.614770   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.658158   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.667272   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.681809   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.696081   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.713492   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.729453   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.752452   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.761341   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.772395   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.792677   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.799611   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.818223   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.854654   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.887602   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.938826   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.952551   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.952641   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.956670   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:26.975166   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:27.032855   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:27.042341   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:27.078693   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:27.109456   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:27.121629   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W0813 01:46:27.175809   54230 clientconn.go:1223] grpc: addrConn.createTransport failed to connect to {http://127.0.0.1:2379  <nil> 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
+++ [0813 01:46:28] Checking etcd is on PATH
/home/prow/go/src/k8s.io/kubernetes/third_party/etcd/etcd
+++ [0813 01:46:28] Starting etcd instance
etcd --advertise-client-urls http://127.0.0.1:2379 --data-dir /tmp/tmp.vZh2T7q6nS --listen-client-urls http://127.0.0.1:2379 --log-level=debug > "/logs/artifacts/etcd.451632db-dd03-11ea-855c-e27bdbc74699.root.log.DEBUG.20200813-014628.92605" 2>/dev/null
Waiting for etcd to come up.
+++ [0813 01:46:28] On try 2, etcd: : {"health":"true"}
... skipping 9 lines ...
{"Time":"2020-08-13T01:49:33.591470638Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=","Output":"pkg/endpoints/filters/impersonation.go:50 +0x203d\\nnet/http.HandlerFunc.ServeHTTP(0xc00f0c6b80, 0x7f4994a997f0, 0xc01e2872e0, 0xc01de91b00)\\n\\t/usr/local/go/src/net/http/server.go:2042 +0x44\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f4994a997f0, 0xc01e2872e0, 0xc01de91a00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/authentication.go:70 +0x672\\nnet/http.HandlerFunc.ServeHTTP(0xc0029fae60, 0x7f4994a997f0, 0xc01e2872e0, 0xc01de91a00)\\n\\t/usr/local/go/src/net/http/server.go:2042 +0x44\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc01448e4e0, 0xc005fd03c0, 0x5175c60, 0xc01e2872e0, 0xc01de91a00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:113 +0xb8\\ncreated by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP\\"}
{"Time":"2020-08-13T01:49:33.593199618Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=NotOlderThan","Output":"/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:228 +0xb2\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.(*ResponseWriterDelegator).WriteHeader(0xc014383020, 0x1f4)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:503 +0x45\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.(*deferredResponseWriter).Write(0xc001c89180, 0xc01e0b8000, 0xc0, 0x5dfe, 0x0, 0x0, 0xc01b94c170)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:202 +0x1f7\\nencoding/json.(*Encoder).Encode(0xc01b94c190, 0x4857940, 0xc01e06d5e0, 0x0, 0x41147b)\\n\\t/usr/local/go/src/encoding/json/stream.go:231 +0x1cb\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).doEncode(0xc0004944b0, 0x511bc00, 0xc01e06d5e0, 0x510a820, 0xc001c89180, 0x0, 0x0)\\n\\t/home/prow/go/src/"}
{"Time":"2020-08-13T01:49:33.593208625Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=NotOlderThan","Output":"k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json/json.go:326 +0x2e9\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).Encode(0xc0004944b0, 0x511bc00, 0xc01e06d5e0, 0x510a820, 0xc001c89180, 0x3a5cdfc, 0x6)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json/json.go:300 +0x169\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).doEncode(0xc01e06d680, 0x511bc00, 0xc01e06d5e0, 0x510a820, 0xc001c89180, 0x0, 0x0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning/versioning.go:228 +0x396\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).Encode(0xc01e06d680, 0x511bc00, 0xc01e06d5e0, 0x510a820, 0xc001c89180, 0xc0004944b0, 0x5124280)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_outpu"}
{"Time":"2020-08-13T01:49:33.593216597Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=NotOlderThan","Output":"t/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning/versioning.go:184 +0x170\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.SerializeObject(0x4919a37, 0x10, 0x7f4995e18168, 0xc01e06d680, 0x516f160, 0xc01f407e10, 0xc01e1b7500, 0x1f4, 0x511bc00, 0xc01e06d5e0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:96 +0x12c\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.WriteObjectNegotiated(0x51726e0, 0xc01eb0f020, 0x5172a20, 0x732c648, 0x48fce50, 0x4, 0x48fb706, 0x2, 0x516f160, 0xc01f407e10, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:251 +0x572\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.ErrorNegotiated(0x510eb60, 0xc01e181ea0, 0x51726e0, 0xc01eb0f020, 0x4"}
{"Time":"2020-08-13T01:49:33.593239646Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=NotOlderThan","Output":"/metrics.InstrumentRouteFunc.func1(0xc014382f90, 0xc00267b880)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:384 +0x282\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).dispatch(0xc00f331200, 0x7f4994a997f0, 0xc01f407da8, 0xc01e1b7500)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/github.com/emicklei/go-restful/container.go:288 +0xa84\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).Dispatch(...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/github.com/emicklei/go-restful/container.go:199\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x491323e, 0xe, 0xc00f331200, 0xc00003e770, 0x7f4994a997f0, 0xc01f407da8, 0xc01e1b7500)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/handler.go:146 +0x539\\nk8s.io/kubernetes/vendor"}
{"Time":"2020-08-13T01:49:33.593256352Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=NotOlderThan","Output":"ernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/impersonation.go:50 +0x203d\\nnet/http.HandlerFunc.ServeHTTP(0xc00f0c6b80, 0x7f4994a997f0, 0xc01f407da8, 0xc01e1b7500)\\n\\t/usr/local/go/src/net/http/server.go:2042 +0x44\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7f4994a997f0, 0xc01f407da8, 0xc01e1b7400)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/authentication.go:70 +0x672\\nnet/http.HandlerFunc.ServeHTTP(0xc0029fae60, 0x7f4994a997f0, 0xc01f407da8, 0xc01e1b7400)\\n\\t/usr/local/go/src/net/http/server.go:2042 +0x44\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc0144e8180, 0xc005fd03c0, 0x5175c60, 0xc01f407da8, 0xc01e1b7400)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:113 +0xb8\\ncreated by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filt"}
{"Time":"2020-08-13T01:49:41.442991923Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestSelfSubjectAccessReview","Output":"eout.go:228 +0xb2\\nnet/http.Error(0x7fb5000b4dd0, 0xc0090c0950, 0xc0092f6240, 0x60, 0x1f4)\\n\\t/usr/local/go/src/net/http/server.go:2054 +0x1f6\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.InternalError(0x7fb5000b4dd0, 0xc0090c0950, 0xc0092f3000, 0x5183940, 0xc00930af20)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/errors.go:75 +0x11e\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7fb5000b4dd0, 0xc0090c0950, 0xc0092f3000)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/authorization.go:69 +0x497\\nnet/http.HandlerFunc.ServeHTTP(0xc005b4e600, 0x7fb5000b4dd0, 0xc0090c0950, 0xc0092f3000)\\n\\t/usr/local/go/src/net/http/server.go:2042 +0x44\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func2(0x7fb5000b4dd0, 0xc0090c0950, 0xc0092f300"}
{"Time":"2020-08-13T01:49:41.443000107Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestSelfSubjectAccessReview","Output":"0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/maxinflight.go:175 +0x4cf\\nnet/http.HandlerFunc.ServeHTTP(0xc005b5f590, 0x7fb5000b4dd0, 0xc0090c0950, 0xc0092f3000)\\n\\t/usr/local/go/src/net/http/server.go:2042 +0x44\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7fb5000b4dd0, 0xc0090c0950, 0xc0092f3000)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/impersonation.go:50 +0x203d\\nnet/http.HandlerFunc.ServeHTTP(0xc005b4e640, 0x7fb5000b4dd0, 0xc0090c0950, 0xc0092f3000)\\n\\t/usr/local/go/src/net/http/server.go:2042 +0x44\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7fb5000b4dd0, 0xc0090c0950, 0xc0092f2f00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/authentication.go:70 +0x672\\nnet/http.HandlerFunc"}
{"Time":"2020-08-13T01:49:49.746709945Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAuthModeAlwaysAllow","Output":"netes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.(*ResponseWriterDelegator).WriteHeader(0xc00ffb8fc0, 0x1f7)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:503 +0x45\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.(*deferredResponseWriter).Write(0xc00c9541e0, 0xc004e42000, 0xa3, 0x9db, 0x0, 0x0, 0xc00d7c2178)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:202 +0x1f7\\nencoding/json.(*Encoder).Encode(0xc00d7c2198, 0x48be0a0, 0xc00ff69360, 0x0, 0x41147b)\\n\\t/usr/local/go/src/encoding/json/stream.go:231 +0x1cb\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).doEncode(0xc000112460, 0x5190d60, 0xc00ff69360, 0x517f980, 0xc00c9541e0, 0x0, 0x0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/ru"}
{"Time":"2020-08-13T01:49:49.746719024Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAuthModeAlwaysAllow","Output":"ntime/serializer/json/json.go:326 +0x2e9\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).Encode(0xc000112460, 0x5190d60, 0xc00ff69360, 0x517f980, 0xc00c9541e0, 0x3aab074, 0x6)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json/json.go:300 +0x169\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).doEncode(0xc00ff69400, 0x5190d60, 0xc00ff69360, 0x517f980, 0xc00c9541e0, 0x0, 0x0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning/versioning.go:228 +0x396\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).Encode(0xc00ff69400, 0x5190d60, 0xc00ff69360, 0x517f980, 0xc00c9541e0, 0xc000112460, 0x5199aa0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/version"}
{"Time":"2020-08-13T01:49:49.746735686Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAuthModeAlwaysAllow","Output":"ing/versioning.go:184 +0x170\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.SerializeObject(0x49816a2, 0x10, 0x7fb5001c08b8, 0xc00ff69400, 0x51e5400, 0xc00d90e678, 0xc01030a000, 0x1f7, 0x5190d60, 0xc00ff69360)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:96 +0x12c\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.WriteObjectNegotiated(0x51e8a00, 0xc00cc8f3e0, 0x51e8d40, 0x7474268, 0x0, 0x0, 0x49623f6, 0x2, 0x51e5400, 0xc00d90e678, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:251 +0x572\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.ErrorNegotiated(0x517f1e0, 0xc00ff692c0, 0x51e8a00, 0xc00cc8f3e0, 0x0, 0x0, 0x49623f6, 0x2, 0x51e5400, 0xc00d90e678, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_out"}
{"Time":"2020-08-13T01:49:49.746745782Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAuthModeAlwaysAllow","Output":"put/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:270 +0x16f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers.(*RequestScope).err(...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/rest.go:89\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers.ConnectResource.func1.1()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/rest.go:188 +0x259\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.RecordLongRunning(0xc01030a000, 0xc0103044d0, 0x496cb13, 0x9, 0xc00b512f90)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:340 +0x289\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers.ConnectResource.func1(0x51e5400, 0xc00d90e678, 0xc01030a000)\\n\\t/home/prow/go/src/k8s.io"}
{"Time":"2020-08-13T01:49:49.746753074Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAuthModeAlwaysAllow","Output":"/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/rest.go:185 +0x472\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints.restfulConnectResource.func1(0xc00ffb8f30, 0xc00bd785b0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/installer.go:1211 +0x99\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.InstrumentRouteFunc.func1(0xc00ffb8f30, 0xc00bd785b0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:384 +0x282\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).dispatch(0xc00cc9a090, 0x7fb5000b4dd0, 0xc00d90e648, 0xc01030a000)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/github.com/emicklei/go-restful/container.go:288 +0xa84\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).Dispatch(...)\\n\\t/home/prow/go/src/k8s.io"}
{"Time":"2020-08-13T01:49:49.74676884Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAuthModeAlwaysAllow","Output":"io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/maxinflight.go:175 +0x4cf\\nnet/http.HandlerFunc.ServeHTTP(0xc00cc94600, 0x7fb5000b4dd0, 0xc00d90e648, 0xc01030a000)\\n\\t/usr/local/go/src/net/http/server.go:2042 +0x44\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7fb5000b4dd0, 0xc00d90e648, 0xc01030a000)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/impersonation.go:50 +0x203d\\nnet/http.HandlerFunc.ServeHTTP(0xc00cc9c040, 0x7fb5000b4dd0, 0xc00d90e648, 0xc01030a000)\\n\\t/usr/local/go/src/net/http/server.go:2042 +0x44\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7fb5000b4dd0, 0xc00d90e648, 0xc008deff00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/authentication.go:70 +0x672\\nnet/http.HandlerFunc.ServeHTTP(0xc00cb4fc70, 0x7fb5000b4dd0, 0xc00d90e648, 0xc008deff00"}
{"Time":"2020-08-13T01:49:49.746776208Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAuthModeAlwaysAllow","Output":")\\n\\t/usr/local/go/src/net/http/server.go:2042 +0x44\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00cbb07e0, 0xc00cc98420, 0x51ec000, 0xc00d90e648, 0xc008deff00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:113 +0xb8\\ncreated by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:99 +0x1cc\\n\" addedInfo=\"\\nlogging error output: \\\"{\\\\\\\"kind\\\\\\\":\\\\\\\"Status\\\\\\\",\\\\\\\"apiVersion\\\\\\\":\\\\\\\"v1\\\\\\\",\\\\\\\"metadata\\\\\\\":{},\\\\\\\"status\\\\\\\":\\\\\\\"Failure\\\\\\\",\\\\\\\"message\\\\\\\":\\\\\\\"no endpoints available for service \\\\\\\\\\\\\\\"a\\\\\\\\\\\\\\\"\\\\\\\",\\\\\\\"reason\\\\\\\":\\\\\\\"ServiceUnavailable\\\\\\\",\\\\\\\"code\\\\\\\":503}\\\\n\\\"\\n\"\n"}
{"Time":"2020-08-13T01:49:58.382357733Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAliceNotForbiddenOrUnauthorized","Output":"io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.(*ResponseWriterDelegator).WriteHeader(0xc00ceac270, 0x1f7)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:503 +0x45\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.(*deferredResponseWriter).Write(0xc00c4783c0, 0xc0147fc000, 0xa3, 0x8ba, 0x0, 0x0, 0xc00c62e178)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:202 +0x1f7\\nencoding/json.(*Encoder).Encode(0xc00c62e198, 0x48be0a0, 0xc0112235e0, 0x0, 0x41147b)\\n\\t/usr/local/go/src/encoding/json/stream.go:231 +0x1cb\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).doEncode(0xc000112460, 0x5190d60, 0xc0112235e0, 0x517f980, 0xc00c4783c0, 0x0, 0x0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachiner"}
{"Time":"2020-08-13T01:49:58.382368449Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAliceNotForbiddenOrUnauthorized","Output":"y/pkg/runtime/serializer/json/json.go:326 +0x2e9\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).Encode(0xc000112460, 0x5190d60, 0xc0112235e0, 0x517f980, 0xc00c4783c0, 0x3aab074, 0x6)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json/json.go:300 +0x169\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).doEncode(0xc011223680, 0x5190d60, 0xc0112235e0, 0x517f980, 0xc00c4783c0, 0x0, 0x0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning/versioning.go:228 +0x396\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).Encode(0xc011223680, 0x5190d60, 0xc0112235e0, 0x517f980, 0xc00c4783c0, 0xc000112460, 0x5199aa0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer"}
{"Time":"2020-08-13T01:49:58.382379309Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAliceNotForbiddenOrUnauthorized","Output":"/versioning/versioning.go:184 +0x170\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.SerializeObject(0x49816a2, 0x10, 0x7fb5001c08b8, 0xc011223680, 0x51e5400, 0xc018f7c858, 0xc010989d00, 0x1f7, 0x5190d60, 0xc0112235e0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:96 +0x12c\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.WriteObjectNegotiated(0x51e8a00, 0xc015919800, 0x51e8d40, 0x7474268, 0x0, 0x0, 0x49623f6, 0x2, 0x51e5400, 0xc018f7c858, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:251 +0x572\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.ErrorNegotiated(0x517f1e0, 0xc011223540, 0x51e8a00, 0xc015919800, 0x0, 0x0, 0x49623f6, 0x2, 0x51e5400, 0xc018f7c858, ...)\\n\\t/home/prow/go/src/k8s.io/kuberne"}
{"Time":"2020-08-13T01:49:58.382391287Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAliceNotForbiddenOrUnauthorized","Output":"tes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:270 +0x16f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers.(*RequestScope).err(...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/rest.go:89\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers.ConnectResource.func1.1()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/rest.go:188 +0x259\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.RecordLongRunning(0xc010989d00, 0xc008ff0a50, 0x496cb13, 0x9, 0xc018284f90)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:340 +0x289\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers.ConnectResource.func1(0x51e5400, 0xc018f7c858, 0xc010989d00)\\n\\t/home/prow/go/sr"}
{"Time":"2020-08-13T01:49:58.382400947Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAliceNotForbiddenOrUnauthorized","Output":"c/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/rest.go:185 +0x472\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints.restfulConnectResource.func1(0xc00ceac1e0, 0xc001aebb20)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/installer.go:1211 +0x99\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.InstrumentRouteFunc.func1(0xc00ceac1e0, 0xc001aebb20)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:384 +0x282\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).dispatch(0xc01591ef30, 0x7fb5000b4dd0, 0xc018f7c848, 0xc010989d00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/github.com/emicklei/go-restful/container.go:288 +0xa84\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).Dispatch(...)\\n\\t/home/prow/go/sr"}
{"Time":"2020-08-13T01:49:58.382420182Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAliceNotForbiddenOrUnauthorized","Output":"src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/maxinflight.go:175 +0x4cf\\nnet/http.HandlerFunc.ServeHTTP(0xc01590f980, 0x7fb5000b4dd0, 0xc018f7c848, 0xc010989d00)\\n\\t/usr/local/go/src/net/http/server.go:2042 +0x44\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7fb5000b4dd0, 0xc018f7c848, 0xc010989d00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/impersonation.go:50 +0x203d\\nnet/http.HandlerFunc.ServeHTTP(0xc015341b40, 0x7fb5000b4dd0, 0xc018f7c848, 0xc010989d00)\\n\\t/usr/local/go/src/net/http/server.go:2042 +0x44\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthentication.func1(0x7fb5000b4dd0, 0xc018f7c848, 0xc010989c00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/authentication.go:70 +0x672\\nnet/http.HandlerFunc.ServeHTTP(0xc015353630, 0x7fb5000b4dd0, 0xc018f7c848, 0xc0"}
{"Time":"2020-08-13T01:49:58.38242915Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAliceNotForbiddenOrUnauthorized","Output":"10989c00)\\n\\t/usr/local/go/src/net/http/server.go:2042 +0x44\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1(0xc00cab7380, 0xc01591adc0, 0x51ec000, 0xc018f7c848, 0xc010989c00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:113 +0xb8\\ncreated by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:99 +0x1cc\\n\" addedInfo=\"\\nlogging error output: \\\"{\\\\\\\"kind\\\\\\\":\\\\\\\"Status\\\\\\\",\\\\\\\"apiVersion\\\\\\\":\\\\\\\"v1\\\\\\\",\\\\\\\"metadata\\\\\\\":{},\\\\\\\"status\\\\\\\":\\\\\\\"Failure\\\\\\\",\\\\\\\"message\\\\\\\":\\\\\\\"no endpoints available for service \\\\\\\\\\\\\\\"a\\\\\\\\\\\\\\\"\\\\\\\",\\\\\\\"reason\\\\\\\":\\\\\\\"ServiceUnavailable\\\\\\\",\\\\\\\"code\\\\\\\":503}\\\\n\\\"\\n\"\n"}
{"Time":"2020-08-13T01:50:11.39830289Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.(*ResponseWriterDelegator).WriteHeader(0xc0266471a0, 0x1f4)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:503 +0x45\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.(*deferredResponseWriter).Write(0xc026632dc0, 0xc009357000, 0xbb, 0xada, 0x0, 0x0, 0xc026653ef8)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:202 +0x1f7\\nencoding/json.(*Encoder).Encode(0xc026653f18, 0x48be0a0, 0xc02665a3c0, 0x0, 0x41147b)\\n\\t/usr/local/go/src/encoding/json/stream.go:231 +0x1cb\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).doEncode(0xc000112460, 0x5190d60, 0xc02665a3c0, 0x517f980, 0xc026632dc0, 0x0, 0x0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apima"}
{"Time":"2020-08-13T01:50:11.398312932Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"chinery/pkg/runtime/serializer/json/json.go:326 +0x2e9\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).Encode(0xc000112460, 0x5190d60, 0xc02665a3c0, 0x517f980, 0xc026632dc0, 0x3aab074, 0x6)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json/json.go:300 +0x169\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).doEncode(0xc02665a460, 0x5190d60, 0xc02665a3c0, 0x517f980, 0xc026632dc0, 0x0, 0x0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning/versioning.go:228 +0x396\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).Encode(0xc02665a460, 0x5190d60, 0xc02665a3c0, 0x517f980, 0xc026632dc0, 0xc000112460, 0x5199aa0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/seri"}
{"Time":"2020-08-13T01:50:11.398321054Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"alizer/versioning/versioning.go:184 +0x170\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.SerializeObject(0x49816a2, 0x10, 0x7fb5001c08b8, 0xc02665a460, 0x51e5400, 0xc0257993a0, 0xc02664cb00, 0x1f4, 0x5190d60, 0xc02665a3c0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:96 +0x12c\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.WriteObjectNegotiated(0x51e8a00, 0xc0212029c0, 0x51e8d40, 0x7474268, 0x0, 0x0, 0x49623f6, 0x2, 0x51e5400, 0xc0257993a0, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:251 +0x572\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.ErrorNegotiated(0x51792c0, 0xc026631110, 0x51e8a00, 0xc0212029c0, 0x0, 0x0, 0x49623f6, 0x2, 0x51e5400, 0xc0257993a0, ...)\\n\\t/home/prow/go/src/k8s.io/k"}
{"Time":"2020-08-13T01:50:11.39832983Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"ubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:270 +0x16f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers.(*RequestScope).err(...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/rest.go:89\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers.DeleteResource.func1(0x51e5400, 0xc0257993a0, 0xc02664cb00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/delete.go:95 +0x1a25\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints.restfulDeleteResource.func1(0xc026647110, 0xc0263f5570)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/installer.go:1175 +0x83\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.InstrumentRouteFunc.func1(0xc026647110, 0xc0263f5570)\\n\\t/home/prow/go/sr"}
{"Time":"2020-08-13T01:50:11.398340055Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"c/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:384 +0x282\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).dispatch(0xc01e394e10, 0x7fb5000b4dd0, 0xc025799390, 0xc02664cb00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/github.com/emicklei/go-restful/container.go:288 +0xa84\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).Dispatch(...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/github.com/emicklei/go-restful/container.go:199\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP(0x497ad09, 0xe, 0xc01e394e10, 0xc0191df730, 0x7fb5000b4dd0, 0xc025799390, 0xc02664cb00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/handler.go:146 +0x539\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1(0x7fb5000b4dd0, 0x"}
{"Time":"2020-08-13T01:50:11.39834819Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"c025799390, 0xc02664cb00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/authorization.go:64 +0x563\\nnet/http.HandlerFunc.ServeHTTP(0xc01ff653c0, 0x7fb5000b4dd0, 0xc025799390, 0xc02664cb00)\\n\\t/usr/local/go/src/net/http/server.go:2042 +0x44\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithMaxInFlightLimit.func2(0x7fb5000b4dd0, 0xc025799390, 0xc02664cb00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/maxinflight.go:175 +0x4cf\\nnet/http.HandlerFunc.ServeHTTP(0xc01e385b60, 0x7fb5000b4dd0, 0xc025799390, 0xc02664cb00)\\n\\t/usr/local/go/src/net/http/server.go:2042 +0x44\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1(0x7fb5000b4dd0, 0xc025799390, 0xc02664ca00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/impersonation.go:148 +0xc70\\"}
... skipping 459 lines ...
    limit_test.go:218: skipping expensive test
    --- SKIP: TestLimits/MergePatchType_nested_patch_under_limit (0.00s)

=== SKIP: vendor/k8s.io/apiextensions-apiserver/test/integration TestLimits/ApplyPatchType_nested_patch_under_limit (0.00s)
    limit_test.go:229: skipping expensive test
I0813 01:59:06.825787  121607 ???:1] sending watch cancel request for closed watcher{watch-id 11 0  <nil>}
W0813 01:59:06.826047  121607 ???:1] failed to send watch cancel request{watch-id 11 0  <nil>} {error 25 0  EOF}
W0813 01:59:06.828789  121607 cacher.go:148] Terminating all watchers from cacher *apiextensions.CustomResourceDefinition
I0813 01:59:06.828999  121607 ???:1] sending watch cancel request for closed watcher{watch-id 11 0  <nil>}
I0813 01:59:06.829656  121607 ???:1] sending watch cancel request for closed watcher{watch-id 11 0  <nil>}
W0813 01:59:06.829739  121607 ???:1] failed to send watch cancel request{watch-id 11 0  <nil>} {error 25 0  EOF}
I0813 01:59:06.830198  121607 crd_finalizer.go:278] Shutting down CRDFinalizer
I0813 01:59:06.830479  121607 secure_serving.go:241] Stopped listening on 127.0.0.1:35613
I0813 01:59:06.830542  121607 dynamic_serving_content.go:145] Shutting down serving-cert::/tmp/apiextensions-apiserver468250988/apiserver.crt::/tmp/apiextensions-apiserver468250988/apiserver.key
I0813 01:59:06.830850  121607 nonstructuralschema_controller.go:198] Shutting down NonStructuralSchemaConditionController
I0813 01:59:06.830911  121607 establishing_controller.go:87] Shutting down EstablishingController
I0813 01:59:06.830932  121607 naming_controller.go:302] Shutting down NamingConditionController
I0813 01:59:06.830915  121607 apiapproval_controller.go:198] Shutting down KubernetesAPIApprovalPolicyConformantConditionController
I0813 01:59:06.830242  121607 customresource_discovery_controller.go:245] Shutting down DiscoveryController
I0813 01:59:06.830269  121607 tlsconfig.go:255] Shutting down DynamicServingCertificateController
    --- SKIP: TestLimits/ApplyPatchType_nested_patch_under_limit (0.00s)


=== Failed
=== FAIL: test/integration/auth TestDynamicClientBuilder (25.15s)
I0813 01:50:53.079317  114703 feature_gate.go:243] feature gates: &{map[TokenRequest:true]}
I0813 01:50:53.642230  114703 client.go:360] parsed scheme: "passthrough"
I0813 01:50:53.642288  114703 passthrough.go:48] ccResolverWrapper: sending update to cc: {[{http://127.0.0.1:2379  <nil> 0 <nil>}] <nil> <nil>}
I0813 01:50:53.642302  114703 clientconn.go:948] ClientConn switching balancer to "pick_first"
I0813 01:50:53.642550  114703 balancer_conn_wrappers.go:78] pickfirstBalancer: HandleSubConnStateChange: 0xc0257a7c60, {CONNECTING <nil>}
I0813 01:50:53.643088  114703 balancer_conn_wrappers.go:78] pickfirstBalancer: HandleSubConnStateChange: 0xc0257a7c60, {READY <nil>}
I0813 01:50:53.643899  114703 controlbuf.go:508] transport: loopyWriter.run returning. connection error: desc = "transport is closing"
I0813 01:50:54.411557  114703 plugins.go:84] Registered admission plugin "NamespaceLifecycle"
I0813 01:50:54.411597  114703 plugins.go:84] Registered admission plugin "ValidatingAdmissionWebhook"
I0813 01:50:54.411605  114703 plugins.go:84] Registered admission plugin "MutatingAdmissionWebhook"
I0813 01:50:54.411612  114703 plugins.go:84] Registered admission plugin "AlwaysAdmit"
I0813 01:50:54.411618  114703 plugins.go:84] Registered admission plugin "AlwaysPullImages"
I0813 01:50:54.411624  114703 plugins.go:84] Registered admission plugin "LimitPodHardAntiAffinityTopology"
... skipping 740 lines ...
I0813 01:51:17.499594  114703 httplog.go:89] "HTTP" verb="GET" URI="/apis/node.k8s.io/v1beta1/runtimeclasses?limit=500&resourceVersion=0" latency="6.932154ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
W0813 01:51:17.500430  114703 warnings.go:67] node.k8s.io/v1beta1 RuntimeClass is deprecated in v1.22+, unavailable in v1.25+
I0813 01:51:17.500853  114703 httplog.go:89] "HTTP" verb="GET" URI="/apis/storage.k8s.io/v1/storageclasses?limit=500&resourceVersion=0" latency="15.609821ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.501695  114703 httplog.go:89] "HTTP" verb="GET" URI="/api/v1/services?limit=500&resourceVersion=0" latency="3.494899ms" userAgent="auth.test/v0.0.0 (linux/amd64) kubernetes/$Format" srcIP="127.0.0.1:50570" resp=200
I0813 01:51:17.503383  114703 get.go:259] "Starting watch" path="/apis/networking.k8s.io/v1/ingressclasses" resourceVersion="9634" labels="" fields="" timeout="7m45s"
I0813 01:51:17.503981  114703 get.go:259] "Starting watch" path="/apis/admissionregistration.k8s.io/v1/validatingwebhookconfigurations" resourceVersion="9634" labels="" fields="" timeout="5m3s"
I0813 01:51:17.516314  114703 healthz.go:243] healthz check failed: poststarthook/bootstrap-con