Result: FAILURE
Tests: 1 failed / 3696 succeeded
Started: 2021-10-13 22:24
Elapsed: 40m26s
Revision: master

Test Failures


k8s.io/kubernetes/test/integration/apiserver/admissionwebhook TestWebhookAdmissionWithoutWatchCache 1m23s

go test -v k8s.io/kubernetes/test/integration/apiserver/admissionwebhook -run TestWebhookAdmissionWithoutWatchCache$
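The command above reruns the entire failing test locally. Go's test runner also accepts slash-separated -run patterns, so a single subtest can be targeted instead; for example, using one of the subtest names that appears in the results below:

go test -v k8s.io/kubernetes/test/integration/apiserver/admissionwebhook -run 'TestWebhookAdmissionWithoutWatchCache$/.v1.configmaps'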
=== RUN   TestWebhookAdmissionWithoutWatchCache
I1013 22:47:26.782526  116659 crdregistration_controller.go:142] Shutting down crd-autoregister controller
I1013 22:47:26.782535  116659 apiapproval_controller.go:198] Shutting down KubernetesAPIApprovalPolicyConformantConditionController
I1013 22:47:26.782542  116659 nonstructuralschema_controller.go:204] Shutting down NonStructuralSchemaConditionController
I1013 22:47:26.782552  116659 establishing_controller.go:87] Shutting down EstablishingController
I1013 22:47:26.782559  116659 naming_controller.go:302] Shutting down NamingConditionController
I1013 22:47:26.782567  116659 crd_finalizer.go:278] Shutting down CRDFinalizer
I1013 22:47:26.782576  116659 autoregister_controller.go:165] Shutting down autoregister controller
I1013 22:47:26.782584  116659 available_controller.go:503] Shutting down AvailableConditionController
I1013 22:47:26.782593  116659 apf_controller.go:322] Shutting down API Priority and Fairness config worker
I1013 22:47:26.782606  116659 cluster_authentication_trust_controller.go:463] Shutting down cluster_authentication_trust_controller controller
I1013 22:47:26.782668  116659 dynamic_cafile_content.go:170] "Shutting down controller" name="request-header::/tmp/kubernetes-kube-apiserver2236613846/proxy-ca.crt"
I1013 22:47:26.782691  116659 dynamic_cafile_content.go:170] "Shutting down controller" name="client-ca-bundle::/tmp/kubernetes-kube-apiserver2236613846/client-ca.crt"
I1013 22:47:26.782711  116659 controller.go:89] Shutting down OpenAPI AggregationController
I1013 22:47:26.782721  116659 dynamic_cafile_content.go:170] "Shutting down controller" name="client-ca-bundle::/tmp/kubernetes-kube-apiserver2236613846/client-ca.crt"
I1013 22:47:26.782733  116659 tlsconfig.go:255] "Shutting down DynamicServingCertificateController"
I1013 22:47:26.782747  116659 dynamic_serving_content.go:145] "Shutting down controller" name="serving-cert::/tmp/kubernetes-kube-apiserver2236613846/apiserver.crt::/tmp/kubernetes-kube-apiserver2236613846/apiserver.key"
I1013 22:47:26.782760  116659 secure_serving.go:311] Stopped listening on 127.0.0.1:35751
I1013 22:47:26.782766  116659 dynamic_cafile_content.go:170] "Shutting down controller" name="request-header::/tmp/kubernetes-kube-apiserver2236613846/proxy-ca.crt"
I1013 22:47:26.782790  116659 dynamic_cafile_content.go:210] "Failed to remove file watch, it may have been deleted" file="/tmp/kubernetes-kube-apiserver2236613846/client-ca.crt" err="can't remove non-existent inotify watch for: /tmp/kubernetes-kube-apiserver2236613846/client-ca.crt"
I1013 22:47:26.782830  116659 dynamic_cafile_content.go:210] "Failed to remove file watch, it may have been deleted" file="/tmp/kubernetes-kube-apiserver2236613846/proxy-ca.crt" err="can't remove non-existent inotify watch for: /tmp/kubernetes-kube-apiserver2236613846/proxy-ca.crt"
I1013 22:47:26.782854  116659 dynamic_cafile_content.go:210] "Failed to remove file watch, it may have been deleted" file="/tmp/kubernetes-kube-apiserver2236613846/client-ca.crt" err="can't remove non-existent inotify watch for: /tmp/kubernetes-kube-apiserver2236613846/client-ca.crt"
I1013 22:47:26.782872  116659 dynamic_serving_content.go:191] "Failed to remove file watch, it may have been deleted" file="/tmp/kubernetes-kube-apiserver2236613846/apiserver.crt" err="can't remove non-existent inotify watch for: /tmp/kubernetes-kube-apiserver2236613846/apiserver.crt"
E1013 22:47:26.784606  116659 dynamic_cafile_content.go:165] "Failed to watch CA file, will retry later" err="error adding watch for file /tmp/kubernetes-kube-apiserver2236613846/client-ca.crt: no such file or directory"
E1013 22:47:26.784609  116659 dynamic_cafile_content.go:165] "Failed to watch CA file, will retry later" err="error adding watch for file /tmp/kubernetes-kube-apiserver2236613846/proxy-ca.crt: no such file or directory"
E1013 22:47:26.784619  116659 dynamic_cafile_content.go:165] "Failed to watch CA file, will retry later" err="error adding watch for file /tmp/kubernetes-kube-apiserver2236613846/client-ca.crt: no such file or directory"
E1013 22:47:26.784654  116659 dynamic_serving_content.go:140] "Failed to watch cert and key file, will retry later" err="error adding watch for file /tmp/kubernetes-kube-apiserver2236613846/apiserver.crt: no such file or directory"
E1013 22:47:26.841117  116659 controller.go:189] Unable to remove endpoints from kubernetes service: Get "https://127.0.0.1:35751/api/v1/namespaces/default/endpoints/kubernetes": dial tcp 127.0.0.1:35751: connect: connection refused
    testserver.go:380: Resolved testserver package path to: "/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/cmd/kube-apiserver/app/testing"
I1013 22:47:27.804536  116659 serving.go:342] Generated self-signed cert (/tmp/kubernetes-kube-apiserver3221788115/apiserver.crt, /tmp/kubernetes-kube-apiserver3221788115/apiserver.key)
I1013 22:47:27.804569  116659 server.go:554] external host was not specified, using 127.0.0.1
W1013 22:47:27.804581  116659 authentication.go:523] AnonymousAuth is not allowed with the AlwaysAllow authorizer. Resetting AnonymousAuth to false. You should use a different authorizer
    testserver.go:215: runtime-config=map[api/all:true]
    testserver.go:216: Starting kube-apiserver on port 43731...
W1013 22:47:28.595656  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:47:28.595801  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:47:28.595873  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:47:28.596267  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:47:28.597581  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:47:28.597629  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:47:28.597822  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:47:28.597865  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:47:28.597895  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:47:28.597936  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:47:28.598221  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:47:28.598457  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:47:28.598557  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
I1013 22:47:28.598587  116659 plugins.go:158] Loaded 9 mutating admission controller(s) successfully in the following order: NamespaceLifecycle,LimitRanger,TaintNodesByCondition,Priority,DefaultTolerationSeconds,DefaultStorageClass,RuntimeClass,DefaultIngressClass,MutatingAdmissionWebhook.
I1013 22:47:28.598596  116659 plugins.go:161] Loaded 10 validating admission controller(s) successfully in the following order: LimitRanger,PodSecurity,Priority,PersistentVolumeClaimResize,RuntimeClass,CertificateApproval,CertificateSigning,CertificateSubjectRestriction,ValidatingAdmissionWebhook,ResourceQuota.
W1013 22:47:28.598687  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:47:28.598715  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
I1013 22:47:28.600553  116659 plugins.go:158] Loaded 9 mutating admission controller(s) successfully in the following order: NamespaceLifecycle,LimitRanger,TaintNodesByCondition,Priority,DefaultTolerationSeconds,DefaultStorageClass,RuntimeClass,DefaultIngressClass,MutatingAdmissionWebhook.
I1013 22:47:28.600580  116659 plugins.go:161] Loaded 10 validating admission controller(s) successfully in the following order: LimitRanger,PodSecurity,Priority,PersistentVolumeClaimResize,RuntimeClass,CertificateApproval,CertificateSigning,CertificateSubjectRestriction,ValidatingAdmissionWebhook,ResourceQuota.
W1013 22:47:28.644356  116659 genericapiserver.go:477] Skipping API apiextensions.k8s.io/v1beta1 because it has no resources.
W1013 22:47:28.644534  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
I1013 22:47:28.645809  116659 instance.go:273] Using reconciler: lease
W1013 22:47:29.162485  116659 genericapiserver.go:477] Skipping API authentication.k8s.io/v1beta1 because it has no resources.
W1013 22:47:29.167392  116659 genericapiserver.go:477] Skipping API authorization.k8s.io/v1beta1 because it has no resources.
W1013 22:47:29.215990  116659 genericapiserver.go:477] Skipping API certificates.k8s.io/v1beta1 because it has no resources.
W1013 22:47:29.220981  116659 genericapiserver.go:477] Skipping API coordination.k8s.io/v1beta1 because it has no resources.
W1013 22:47:29.240553  116659 genericapiserver.go:477] Skipping API networking.k8s.io/v1beta1 because it has no resources.
W1013 22:47:29.277101  116659 genericapiserver.go:477] Skipping API rbac.authorization.k8s.io/v1beta1 because it has no resources.
W1013 22:47:29.277261  116659 genericapiserver.go:477] Skipping API rbac.authorization.k8s.io/v1alpha1 because it has no resources.
W1013 22:47:29.282819  116659 genericapiserver.go:477] Skipping API scheduling.k8s.io/v1beta1 because it has no resources.
W1013 22:47:29.282860  116659 genericapiserver.go:477] Skipping API scheduling.k8s.io/v1alpha1 because it has no resources.
W1013 22:47:29.320386  116659 genericapiserver.go:477] Skipping API apps/v1beta2 because it has no resources.
W1013 22:47:29.320432  116659 genericapiserver.go:477] Skipping API apps/v1beta1 because it has no resources.
W1013 22:47:29.323960  116659 genericapiserver.go:477] Skipping API admissionregistration.k8s.io/v1beta1 because it has no resources.
I1013 22:47:29.330643  116659 plugins.go:158] Loaded 9 mutating admission controller(s) successfully in the following order: NamespaceLifecycle,LimitRanger,TaintNodesByCondition,Priority,DefaultTolerationSeconds,DefaultStorageClass,RuntimeClass,DefaultIngressClass,MutatingAdmissionWebhook.
I1013 22:47:29.330679  116659 plugins.go:161] Loaded 10 validating admission controller(s) successfully in the following order: LimitRanger,PodSecurity,Priority,PersistentVolumeClaimResize,RuntimeClass,CertificateApproval,CertificateSigning,CertificateSubjectRestriction,ValidatingAdmissionWebhook,ResourceQuota.
W1013 22:47:29.332110  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:47:29.354382  116659 genericapiserver.go:477] Skipping API apiregistration.k8s.io/v1beta1 because it has no resources.
W1013 22:47:29.354891  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
    testserver.go:235: Waiting for /healthz to be ok...
I1013 22:47:32.462042  116659 dynamic_cafile_content.go:156] "Starting controller" name="request-header::/tmp/kubernetes-kube-apiserver3221788115/proxy-ca.crt"
I1013 22:47:32.462369  116659 dynamic_serving_content.go:131] "Starting controller" name="serving-cert::/tmp/kubernetes-kube-apiserver3221788115/apiserver.crt::/tmp/kubernetes-kube-apiserver3221788115/apiserver.key"
I1013 22:47:32.462394  116659 dynamic_cafile_content.go:156] "Starting controller" name="client-ca-bundle::/tmp/kubernetes-kube-apiserver3221788115/client-ca.crt"
I1013 22:47:32.462473  116659 secure_serving.go:266] Serving securely on 127.0.0.1:43731
I1013 22:47:32.462558  116659 autoregister_controller.go:141] Starting autoregister controller
I1013 22:47:32.462579  116659 apf_controller.go:313] Starting API Priority and Fairness config controller
I1013 22:47:32.462598  116659 tlsconfig.go:240] "Starting DynamicServingCertificateController"
I1013 22:47:32.462623  116659 apiservice_controller.go:97] Starting APIServiceRegistrationController
I1013 22:47:32.462630  116659 cache.go:32] Waiting for caches to sync for APIServiceRegistrationController controller
I1013 22:47:32.462598  116659 cache.go:32] Waiting for caches to sync for autoregister controller
I1013 22:47:32.463111  116659 customresource_discovery_controller.go:209] Starting DiscoveryController
I1013 22:47:32.463147  116659 crdregistration_controller.go:111] Starting crd-autoregister controller
I1013 22:47:32.463158  116659 shared_informer.go:240] Waiting for caches to sync for crd-autoregister
I1013 22:47:32.463189  116659 controller.go:85] Starting OpenAPI controller
I1013 22:47:32.463224  116659 naming_controller.go:291] Starting NamingConditionController
I1013 22:47:32.463248  116659 establishing_controller.go:76] Starting EstablishingController
I1013 22:47:32.463269  116659 nonstructuralschema_controller.go:192] Starting NonStructuralSchemaConditionController
I1013 22:47:32.463290  116659 apiapproval_controller.go:186] Starting KubernetesAPIApprovalPolicyConformantConditionController
I1013 22:47:32.463318  116659 crd_finalizer.go:266] Starting CRDFinalizer
W1013 22:47:32.464027  116659 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
I1013 22:47:32.464178  116659 cluster_authentication_trust_controller.go:440] Starting cluster_authentication_trust_controller controller
I1013 22:47:32.464185  116659 shared_informer.go:240] Waiting for caches to sync for cluster_authentication_trust_controller
I1013 22:47:32.464217  116659 controller.go:83] Starting OpenAPI AggregationController
I1013 22:47:32.464670  116659 dynamic_cafile_content.go:156] "Starting controller" name="client-ca-bundle::/tmp/kubernetes-kube-apiserver3221788115/client-ca.crt"
I1013 22:47:32.476832  116659 available_controller.go:491] Starting AvailableConditionController
I1013 22:47:32.476871  116659 cache.go:32] Waiting for caches to sync for AvailableConditionController controller
I1013 22:47:32.479033  116659 dynamic_cafile_content.go:156] "Starting controller" name="request-header::/tmp/kubernetes-kube-apiserver3221788115/proxy-ca.crt"
W1013 22:47:32.498350  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.498659  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.499475  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.500243  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.505348  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.508812  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.533270  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.546714  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
I1013 22:47:32.563718  116659 shared_informer.go:247] Caches are synced for crd-autoregister 
I1013 22:47:32.563847  116659 cache.go:39] Caches are synced for APIServiceRegistrationController controller
I1013 22:47:32.563719  116659 apf_controller.go:318] Running API Priority and Fairness config worker
I1013 22:47:32.563673  116659 cache.go:39] Caches are synced for autoregister controller
I1013 22:47:32.564314  116659 shared_informer.go:247] Caches are synced for cluster_authentication_trust_controller 
W1013 22:47:32.570455  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
I1013 22:47:32.577868  116659 cache.go:39] Caches are synced for AvailableConditionController controller
W1013 22:47:32.584127  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
I1013 22:47:32.586990  116659 controller.go:611] quota admission added evaluator for: namespaces
W1013 22:47:32.639782  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.663331  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.671918  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.688055  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.703044  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.709949  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.723335  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.735763  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.785004  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.788265  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.801492  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.825985  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.826277  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.841523  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.865424  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.866058  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.876541  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.925667  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.926099  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.940701  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.950036  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.964345  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.977932  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:32.977929  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.003688  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.015802  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.015860  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.078216  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.079532  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.088433  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.120403  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.121965  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.133846  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.158327  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.158354  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.170661  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.224843  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.225328  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.239221  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.251801  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.253045  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.257763  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.282747  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.284517  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.293493  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.317974  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.318450  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.379715  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.385963  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.398098  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.412569  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.423495  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.444952  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
I1013 22:47:33.462568  116659 controller.go:132] OpenAPI AggregationController: action for item : Nothing (removed from the queue).
I1013 22:47:33.462606  116659 controller.go:132] OpenAPI AggregationController: action for item k8s_internal_local_delegation_chain_0000000000: Nothing (removed from the queue).
W1013 22:47:33.472594  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.524890  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.542755  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
I1013 22:47:33.542792  116659 storage_scheduling.go:93] created PriorityClass system-node-critical with value 2000001000
W1013 22:47:33.555822  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.582866  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.585546  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
I1013 22:47:33.585762  116659 storage_scheduling.go:93] created PriorityClass system-cluster-critical with value 2000000000
I1013 22:47:33.585778  116659 storage_scheduling.go:109] all system priority classes are created successfully or already exist.
W1013 22:47:33.607403  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.620678  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.682761  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.686432  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.701594  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.731187  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.731610  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.761168  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.777897  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.844400  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.875467  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.907672  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.983864  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:33.984270  116659 lease.go:233] Resetting endpoints for master service "kubernetes" to [127.0.0.1]
W1013 22:47:33.986038  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
E1013 22:47:33.986065  116659 controller.go:228] unable to sync kubernetes service: Endpoints "kubernetes" is invalid: subsets[0].addresses[0].ip: Invalid value: "127.0.0.1": may not be in the loopback range (127.0.0.0/8, ::1/128)
W1013 22:47:34.001084  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:34.014637  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:34.016928  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:34.032988  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:34.058656  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:34.131984  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:34.134785  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:34.165136  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:47:34.191412  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
=== CONT  TestWebhookAdmissionWithoutWatchCache
    admission_test.go:656: expected resources to process in < 150ms, average was 162.054349ms
I1013 22:48:49.806421  116659 dynamic_cafile_content.go:210] "Failed to remove file watch, it may have been deleted" file="/tmp/kubernetes-kube-apiserver3221788115/client-ca.crt" err="can't remove non-existent inotify watch for: /tmp/kubernetes-kube-apiserver3221788115/client-ca.crt"
E1013 22:48:49.806516  116659 dynamic_cafile_content.go:236] key failed with : open /tmp/kubernetes-kube-apiserver3221788115/client-ca.crt: no such file or directory
I1013 22:48:49.806553  116659 dynamic_cafile_content.go:210] "Failed to remove file watch, it may have been deleted" file="/tmp/kubernetes-kube-apiserver3221788115/client-ca.crt" err="can't remove non-existent inotify watch for: /tmp/kubernetes-kube-apiserver3221788115/client-ca.crt"
E1013 22:48:49.806641  116659 dynamic_cafile_content.go:236] key failed with : open /tmp/kubernetes-kube-apiserver3221788115/client-ca.crt: no such file or directory
I1013 22:48:49.806741  116659 controller.go:186] Shutting down kubernetes service endpoint reconciler
I1013 22:48:49.806805  116659 dynamic_cafile_content.go:210] "Failed to remove file watch, it may have been deleted" file="/tmp/kubernetes-kube-apiserver3221788115/proxy-ca.crt" err="can't remove non-existent inotify watch for: /tmp/kubernetes-kube-apiserver3221788115/proxy-ca.crt"
I1013 22:48:49.806864  116659 dynamic_serving_content.go:191] "Failed to remove file watch, it may have been deleted" file="/tmp/kubernetes-kube-apiserver3221788115/apiserver.crt" err="can't remove non-existent inotify watch for: /tmp/kubernetes-kube-apiserver3221788115/apiserver.crt"
E1013 22:48:49.806886  116659 dynamic_cafile_content.go:236] key failed with : open /tmp/kubernetes-kube-apiserver3221788115/proxy-ca.crt: no such file or directory
E1013 22:48:49.806917  116659 dynamic_serving_content.go:217] key failed with : open /tmp/kubernetes-kube-apiserver3221788115/apiserver.crt: no such file or directory
--- FAIL: TestWebhookAdmissionWithoutWatchCache (83.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.bindings
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.bindings (0.36s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.bindings/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.bindings/create (0.36s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.configmaps
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.configmaps (0.83s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.configmaps/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.configmaps/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.configmaps/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.configmaps/update (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.configmaps/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.configmaps/patch (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.configmaps/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.configmaps/delete (0.42s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.configmaps/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.configmaps/deletecollection (0.23s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.endpoints
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.endpoints (0.95s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.endpoints/create
I1013 22:47:38.400954  116659 controller.go:611] quota admission added evaluator for: endpoints
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.endpoints/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.endpoints/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.endpoints/update (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.endpoints/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.endpoints/patch (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.endpoints/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.endpoints/delete (0.51s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.endpoints/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.endpoints/deletecollection (0.27s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.events
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.events (0.79s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.events/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.events/create (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.events/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.events/update (0.12s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.events/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.events/patch (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.events/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.events/delete (0.36s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.events/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.events/deletecollection (0.19s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.limitranges
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.limitranges (0.84s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.limitranges/create
I1013 22:47:40.144034  116659 controller.go:611] quota admission added evaluator for: limitranges
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.limitranges/create (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.limitranges/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.limitranges/update (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.limitranges/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.limitranges/patch (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.limitranges/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.limitranges/delete (0.32s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.limitranges/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.limitranges/deletecollection (0.26s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.namespaces
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.namespaces (0.43s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.namespaces/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.namespaces/create (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.namespaces/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.namespaces/update (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.namespaces/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.namespaces/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.namespaces/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.namespaces/delete (0.25s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.namespaces.finalize
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.namespaces.finalize (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.namespaces.finalize/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.namespaces.finalize/update (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.namespaces.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.namespaces.status (0.13s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.namespaces.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.namespaces.status/update (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.namespaces.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.namespaces.status/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.nodes
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.nodes (0.89s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.nodes/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.nodes/create (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.nodes/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.nodes/update (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.nodes/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.nodes/patch (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.nodes/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.nodes/delete (0.43s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.nodes/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.nodes/deletecollection (0.22s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.nodes.proxy
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.nodes.proxy (0.32s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.nodes.proxy/create
    admission_test.go:1198: testing POST
E1013 22:47:42.562759  116659 status.go:71] apiserver received an error that is not an metav1.Status: &node.NoMatchError{addresses:[]v1.NodeAddress(nil)}: no preferred addresses found; known addresses: []
    admission_test.go:1211: debug: result of subresource proxy (error expected): no preferred addresses found; known addresses: []
    admission_test.go:1198: testing GET
E1013 22:47:42.594641  116659 status.go:71] apiserver received an error that is not an metav1.Status: &node.NoMatchError{addresses:[]v1.NodeAddress(nil)}: no preferred addresses found; known addresses: []
    admission_test.go:1211: debug: result of subresource proxy (error expected): no preferred addresses found; known addresses: []
    admission_test.go:1198: testing HEAD
E1013 22:47:42.623303  116659 status.go:71] apiserver received an error that is not an metav1.Status: &node.NoMatchError{addresses:[]v1.NodeAddress(nil)}: no preferred addresses found; known addresses: []
    admission_test.go:1211: debug: result of subresource proxy (error expected): an error on the server ("unknown") has prevented the request from succeeding (head nodes node1)
    admission_test.go:1198: testing OPTIONS
E1013 22:47:42.666068  116659 status.go:71] apiserver received an error that is not an metav1.Status: &node.NoMatchError{addresses:[]v1.NodeAddress(nil)}: no preferred addresses found; known addresses: []
    admission_test.go:1211: debug: result of subresource proxy (error expected): no preferred addresses found; known addresses: []
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.nodes.proxy/create (0.18s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.nodes.proxy/update
    admission_test.go:1198: testing PUT
E1013 22:47:42.709528  116659 status.go:71] apiserver received an error that is not an metav1.Status: &node.NoMatchError{addresses:[]v1.NodeAddress(nil)}: no preferred addresses found; known addresses: []
    admission_test.go:1211: debug: result of subresource proxy (error expected): no preferred addresses found; known addresses: []
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.nodes.proxy/update (0.04s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.nodes.proxy/patch
    admission_test.go:1198: testing PATCH
E1013 22:47:42.746782  116659 status.go:71] apiserver received an error that is not an metav1.Status: &node.NoMatchError{addresses:[]v1.NodeAddress(nil)}: no preferred addresses found; known addresses: []
    admission_test.go:1211: debug: result of subresource proxy (error expected): no preferred addresses found; known addresses: []
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.nodes.proxy/patch (0.04s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.nodes.proxy/delete
    admission_test.go:1198: testing DELETE
E1013 22:47:42.805154  116659 status.go:71] apiserver received an error that is not an metav1.Status: &node.NoMatchError{addresses:[]v1.NodeAddress(nil)}: no preferred addresses found; known addresses: []
    admission_test.go:1211: debug: result of subresource proxy (error expected): no preferred addresses found; known addresses: []
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.nodes.proxy/delete (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.nodes.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.nodes.status (0.14s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.nodes.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.nodes.status/update (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.nodes.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.nodes.status/patch (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims (0.73s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims/create (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims/update (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims/patch (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims/delete (0.30s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims/deletecollection (0.26s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims.status (0.19s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims.status/update (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumeclaims.status/patch (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes (1.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes/create
W1013 22:47:43.878711  116659 lease.go:233] Resetting endpoints for master service "kubernetes" to [127.0.0.1]
W1013 22:47:43.887605  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:43.887640  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:43.887646  116659 warnings.go:70] v1/mutation/false
W1013 22:47:43.887650  116659 warnings.go:70] v1/mutation/true
E1013 22:47:43.887699  116659 controller.go:228] unable to sync kubernetes service: Endpoints "kubernetes" is invalid: subsets[0].addresses[0].ip: Invalid value: "127.0.0.1": may not be in the loopback range (127.0.0.0/8, ::1/128)
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes/create (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes/update (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes/delete (0.56s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes/deletecollection (0.27s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes.status (0.24s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes.status/update (0.15s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.persistentvolumes.status/patch (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods (0.98s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods/create (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods/update (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods/patch (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods/delete (0.59s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods/deletecollection (0.19s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.attach
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.attach (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.attach/create
    admission_test.go:1095: verifying GET
    admission_test.go:1112: debug: result of subresource connect: pod pod1 does not have a host assigned
    admission_test.go:1095: verifying POST
    admission_test.go:1112: debug: result of subresource connect: pod pod1 does not have a host assigned
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.attach/create (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.binding
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.binding (0.23s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.binding/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.binding/create (0.23s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.ephemeralcontainers
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.ephemeralcontainers (0.25s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.ephemeralcontainers/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.ephemeralcontainers/update (0.15s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.ephemeralcontainers/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.ephemeralcontainers/patch (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.eviction
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.eviction (0.32s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.eviction/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.eviction/create (0.32s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.exec
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.exec (0.15s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.exec/create
    admission_test.go:1095: verifying GET
    admission_test.go:1112: debug: result of subresource connect: pod pod1 does not have a host assigned
    admission_test.go:1095: verifying POST
    admission_test.go:1112: debug: result of subresource connect: pod pod1 does not have a host assigned
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.exec/create (0.15s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.portforward
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.portforward (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.portforward/create
    admission_test.go:1095: verifying GET
    admission_test.go:1112: debug: result of subresource connect: pod pod1 does not have a host assigned
    admission_test.go:1095: verifying POST
    admission_test.go:1112: debug: result of subresource connect: pod pod1 does not have a host assigned
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.portforward/create (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.proxy
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.proxy (0.42s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.proxy/create
    admission_test.go:1198: testing POST
    admission_test.go:1211: debug: result of subresource proxy (error expected): address not allowed
    admission_test.go:1198: testing GET
    admission_test.go:1211: debug: result of subresource proxy (error expected): address not allowed
    admission_test.go:1198: testing HEAD
    admission_test.go:1211: debug: result of subresource proxy (error expected): the server rejected our request for an unknown reason (head pods pod1)
    admission_test.go:1198: testing OPTIONS
    admission_test.go:1211: debug: result of subresource proxy (error expected): address not allowed
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.proxy/create (0.22s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.proxy/update
    admission_test.go:1198: testing PUT
    admission_test.go:1211: debug: result of subresource proxy (error expected): address not allowed
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.proxy/update (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.proxy/patch
    admission_test.go:1198: testing PATCH
    admission_test.go:1211: debug: result of subresource proxy (error expected): address not allowed
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.proxy/patch (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.proxy/delete
    admission_test.go:1198: testing DELETE
    admission_test.go:1211: debug: result of subresource proxy (error expected): address not allowed
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.proxy/delete (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.status (0.24s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.status/update (0.12s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.pods.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.pods.status/patch (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.podtemplates
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.podtemplates (0.90s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.podtemplates/create
I1013 22:47:47.933456  116659 controller.go:611] quota admission added evaluator for: podtemplates
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.podtemplates/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.podtemplates/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.podtemplates/update (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.podtemplates/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.podtemplates/patch (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.podtemplates/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.podtemplates/delete (0.45s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.podtemplates/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.podtemplates/deletecollection (0.28s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers (0.90s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers/update (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers/patch (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers/delete (0.52s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers/deletecollection (0.20s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers.scale
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers.scale (0.26s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers.scale/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers.scale/update (0.16s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers.scale/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers.scale/patch (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers.status (0.21s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers.status/update (0.13s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.replicationcontrollers.status/patch (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas (0.95s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas/update (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas/patch (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas/delete (0.47s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas/deletecollection (0.27s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas.status (0.25s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas.status/update (0.18s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.resourcequotas.status/patch (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.secrets
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.secrets (0.90s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.secrets/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.secrets/create (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.secrets/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.secrets/update (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.secrets/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.secrets/patch (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.secrets/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.secrets/delete (0.52s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.secrets/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.secrets/deletecollection (0.14s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts (0.64s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts/create
I1013 22:47:52.295611  116659 controller.go:611] quota admission added evaluator for: serviceaccounts
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts/create (0.02s)
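
Note: the controller.go:611 line above is the ResourceQuota admission plugin lazily registering an evaluator the first time it admits a given resource kind. A minimal client-go sketch of the kind of request that first exercises that path — the kubeconfig path, namespace, and object name are illustrative, not taken from the suite:

    package main

    import (
        "context"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // Illustrative kubeconfig path; the integration test wires its own rest.Config.
        config, err := clientcmd.BuildConfigFromFlags("", "/tmp/kubeconfig")
        if err != nil {
            panic(err)
        }
        client := kubernetes.NewForConfigOrDie(config)

        // The first CREATE of a kind is what makes the quota plugin log
        // "quota admission added evaluator for: serviceaccounts".
        sa := &corev1.ServiceAccount{ObjectMeta: metav1.ObjectMeta{Name: "sa1"}}
        if _, err := client.CoreV1().ServiceAccounts("default").Create(context.TODO(), sa, metav1.CreateOptions{}); err != nil {
            panic(err)
        }
    }
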
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts/update (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts/patch (0.04s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts/delete (0.36s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts/deletecollection (0.19s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts.token
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts.token (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts.token/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.serviceaccounts.token/create (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.services
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.services (1.27s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.services/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.services/create (0.04s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.services/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.services/update (0.04s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.services/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.services/patch (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.services/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.services/delete (0.58s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.services/deletecollection
W1013 22:47:53.924249  116659 lease.go:233] Resetting endpoints for master service "kubernetes" to [127.0.0.1]
W1013 22:47:53.932889  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:53.932916  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:53.932921  116659 warnings.go:70] v1/mutation/false
W1013 22:47:53.932926  116659 warnings.go:70] v1/mutation/true
E1013 22:47:53.932989  116659 controller.go:228] unable to sync kubernetes service: Endpoints "kubernetes" is invalid: subsets[0].addresses[0].ip: Invalid value: "127.0.0.1": may not be in the loopback range (127.0.0.0/8, ::1/128)
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.services/deletecollection (0.49s)
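
Note: the warnings.go:70 lines (v1beta1/mutation/false, v1/validation/true, ...) are client-go logging the Warning response headers that this suite's test webhooks attach to admitted requests; each warning names the webhook's API version, its phase (mutation or validation), and a boolean variant. A minimal sketch of a webhook handler producing such a warning — the handler name, port, and the plain-HTTP shortcut are illustrative, not the suite's actual code:

    package main

    import (
        "encoding/json"
        "net/http"

        admissionv1 "k8s.io/api/admission/v1"
    )

    func serveMutate(w http.ResponseWriter, r *http.Request) {
        var review admissionv1.AdmissionReview
        if err := json.NewDecoder(r.Body).Decode(&review); err != nil || review.Request == nil {
            http.Error(w, "bad AdmissionReview", http.StatusBadRequest)
            return
        }
        review.Response = &admissionv1.AdmissionResponse{
            UID:     review.Request.UID,
            Allowed: true,
            // Relayed by the API server to clients as a "Warning:" header,
            // which client-go then logs via warnings.go.
            Warnings: []string{"v1/mutation/false"},
        }
        w.Header().Set("Content-Type", "application/json")
        _ = json.NewEncoder(w).Encode(review)
    }

    func main() {
        http.HandleFunc("/mutate", serveMutate)
        // Real webhooks must serve TLS; plain HTTP keeps the sketch short.
        _ = http.ListenAndServe(":8443", nil)
    }
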
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.services.proxy
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.services.proxy (0.48s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.services.proxy/create
    admission_test.go:1198: testing POST
    admission_test.go:1211: debug: result of subresource proxy (error expected): services "service1" not found
    admission_test.go:1198: testing GET
    admission_test.go:1211: debug: result of subresource proxy (error expected): services "service1" not found
    admission_test.go:1198: testing HEAD
    admission_test.go:1211: debug: result of subresource proxy (error expected): the server could not find the requested resource (head services service1)
    admission_test.go:1198: testing OPTIONS
    admission_test.go:1211: debug: result of subresource proxy (error expected): services "service1" not found
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.services.proxy/create (0.30s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.services.proxy/update
    admission_test.go:1198: testing PUT
    admission_test.go:1211: debug: result of subresource proxy (error expected): services "service1" not found
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.services.proxy/update (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.services.proxy/patch
    admission_test.go:1198: testing PATCH
    admission_test.go:1211: debug: result of subresource proxy (error expected): services "service1" not found
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.services.proxy/patch (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.services.proxy/delete
    admission_test.go:1198: testing DELETE
    admission_test.go:1211: debug: result of subresource proxy (error expected): services "service1" not found
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.services.proxy/delete (0.03s)
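
Note: the admission_test.go lines above drive each HTTP verb through the services/proxy subresource for a service that deliberately does not exist, so the "not found" results are expected; the point is that the webhooks observe the proxy (connect) operations before the lookup fails. A sketch of one such request with client-go's REST client — the kubeconfig path, namespace, and service name are illustrative:

    package main

    import (
        "context"
        "fmt"

        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // Illustrative kubeconfig; the test uses its own rest.Config.
        config, err := clientcmd.BuildConfigFromFlags("", "/tmp/kubeconfig")
        if err != nil {
            panic(err)
        }
        client := kubernetes.NewForConfigOrDie(config)

        // POST against the proxy subresource of a missing service: admission
        // webhooks still run, then the apiserver returns
        // `services "service1" not found`, matching the debug lines above.
        result := client.CoreV1().RESTClient().Post().
            Namespace("default").
            Resource("services").
            Name("service1").
            SubResource("proxy").
            Do(context.TODO())
        fmt.Println(result.Error())
    }
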
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.services.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.services.status (0.22s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.services.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.services.status/update (0.14s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/.v1.services.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/.v1.services.status/patch (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.mutatingwebhookconfigurations
    --- PASS: TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.mutatingwebhookconfigurations (0.65s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.mutatingwebhookconfigurations/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.mutatingwebhookconfigurations/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.mutatingwebhookconfigurations/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.mutatingwebhookconfigurations/update (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.mutatingwebhookconfigurations/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.mutatingwebhookconfigurations/patch (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.mutatingwebhookconfigurations/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.mutatingwebhookconfigurations/delete (0.36s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.mutatingwebhookconfigurations/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.mutatingwebhookconfigurations/deletecollection (0.17s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.validatingwebhookconfigurations
    --- PASS: TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.validatingwebhookconfigurations (0.67s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.validatingwebhookconfigurations/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.validatingwebhookconfigurations/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.validatingwebhookconfigurations/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.validatingwebhookconfigurations/update (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.validatingwebhookconfigurations/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.validatingwebhookconfigurations/patch (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.validatingwebhookconfigurations/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.validatingwebhookconfigurations/delete (0.30s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.validatingwebhookconfigurations/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/admissionregistration.k8s.io.v1.validatingwebhookconfigurations/deletecollection (0.25s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions (1.68s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions/update
W1013 22:47:56.334421  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:56.334447  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:56.334452  116659 warnings.go:70] v1/mutation/false
W1013 22:47:56.334457  116659 warnings.go:70] v1/mutation/true
W1013 22:47:56.334462  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:56.334466  116659 warnings.go:70] v1/validation/false
W1013 22:47:56.334470  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:56.334478  116659 warnings.go:70] v1/validation/true
W1013 22:47:56.367447  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:56.367482  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:56.367488  116659 warnings.go:70] v1/mutation/false
W1013 22:47:56.367492  116659 warnings.go:70] v1/mutation/true
W1013 22:47:56.367497  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:56.367502  116659 warnings.go:70] v1/validation/false
W1013 22:47:56.367506  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:56.367510  116659 warnings.go:70] v1/validation/true
W1013 22:47:56.473022  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:56.473039  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:56.473044  116659 warnings.go:70] v1/mutation/false
W1013 22:47:56.473048  116659 warnings.go:70] v1/mutation/true
W1013 22:47:56.473053  116659 warnings.go:70] v1/validation/true
W1013 22:47:56.473057  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:56.473061  116659 warnings.go:70] v1/validation/false
W1013 22:47:56.473066  116659 warnings.go:70] v1beta1/validation/true
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions/update (0.29s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions/patch (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions/delete
    admission_test.go:741: waiting for schema.GroupVersionResource{Group:"apiextensions.k8s.io", Version:"v1", Resource:"customresourcedefinitions"} to be deleted (name: openshiftwebconsoleconfigs.webconsole2.operator.openshift.io, finalizers: [customresourcecleanup.apiextensions.k8s.io])...
W1013 22:47:56.900286  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:56.900317  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:56.900322  116659 warnings.go:70] v1/mutation/false
W1013 22:47:56.900326  116659 warnings.go:70] v1/mutation/true
W1013 22:47:56.900332  116659 warnings.go:70] v1/validation/false
W1013 22:47:56.900336  116659 warnings.go:70] v1/validation/true
W1013 22:47:56.900340  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:56.900345  116659 warnings.go:70] v1beta1/validation/true
    admission_test.go:741: waiting for schema.GroupVersionResource{Group:"apiextensions.k8s.io", Version:"v1", Resource:"customresourcedefinitions"} to be deleted (name: openshiftwebconsoleconfigs.webconsole2.operator.openshift.io, finalizers: [customresourcecleanup.apiextensions.k8s.io])...
W1013 22:47:57.000547  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.000580  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.000585  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.000589  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.000594  116659 warnings.go:70] v1/validation/false
W1013 22:47:57.000598  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:57.000643  116659 warnings.go:70] v1/validation/true
W1013 22:47:57.000649  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:57.038050  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.038067  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.038072  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.038077  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.038081  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:57.038086  116659 warnings.go:70] v1/validation/true
W1013 22:47:57.038107  116659 warnings.go:70] v1/validation/false
W1013 22:47:57.038111  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:57.132693  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.132865  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.132917  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.132974  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.133025  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:57.133071  116659 warnings.go:70] v1/validation/false
W1013 22:47:57.133117  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:57.133160  116659 warnings.go:70] v1/validation/true
2021/10/13 22:47:57 http: TLS handshake error from 127.0.0.1:46962: EOF
W1013 22:47:57.177409  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.177436  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.177441  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.177446  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.177451  116659 warnings.go:70] v1/validation/true
W1013 22:47:57.177455  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:57.177459  116659 warnings.go:70] v1/validation/false
W1013 22:47:57.177464  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:57.200397  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.200554  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.200642  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.200730  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.200896  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:57.200988  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:57.201054  116659 warnings.go:70] v1/validation/true
W1013 22:47:57.201130  116659 warnings.go:70] v1/validation/false
W1013 22:47:57.293398  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.293425  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.293435  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.293439  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.293444  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:57.293448  116659 warnings.go:70] v1/validation/false
W1013 22:47:57.293452  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:57.293457  116659 warnings.go:70] v1/validation/true
    admission_test.go:787: waiting for other finalizers on schema.GroupVersionResource{Group:"apiextensions.k8s.io", Version:"v1", Resource:"customresourcedefinitions"} openshiftwebconsoleconfigs.webconsole2.operator.openshift.io to be removed, existing finalizers are [test/k8s.io customresourcecleanup.apiextensions.k8s.io]
W1013 22:47:57.340404  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.340433  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.340437  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.340440  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.340444  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:57.340449  116659 warnings.go:70] v1/validation/true
W1013 22:47:57.340454  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:57.340459  116659 warnings.go:70] v1/validation/false
W1013 22:47:57.348566  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.348594  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.348599  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.348603  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.348608  116659 warnings.go:70] v1/validation/false
W1013 22:47:57.348612  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:57.348617  116659 warnings.go:70] v1/validation/true
W1013 22:47:57.348621  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:57.396843  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.396856  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.396859  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.396863  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.396866  116659 warnings.go:70] v1/validation/true
W1013 22:47:57.396871  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:57.396874  116659 warnings.go:70] v1/validation/false
W1013 22:47:57.396877  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:57.406573  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.406833  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.406940  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.408475  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.408497  116659 warnings.go:70] v1/validation/false
W1013 22:47:57.408501  116659 warnings.go:70] v1/validation/true
W1013 22:47:57.408506  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:57.408510  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:57.473265  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.473297  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.473303  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.473308  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.473312  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:57.473316  116659 warnings.go:70] v1/validation/true
W1013 22:47:57.473320  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:57.473325  116659 warnings.go:70] v1/validation/false
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions/delete (0.85s)
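
Note: the two "waiting for ... to be deleted" messages and the "waiting for other finalizers ... to be removed" message show the delete subtest polling while the customresourcecleanup.apiextensions.k8s.io finalizer runs and the test's own test/k8s.io finalizer is still pending removal. A generic sketch of that polling pattern using apimachinery's wait helpers — the client, GVR, and name are placeholders:

    package example

    import (
        "context"
        "fmt"
        "time"

        apierrors "k8s.io/apimachinery/pkg/api/errors"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/apimachinery/pkg/runtime/schema"
        "k8s.io/apimachinery/pkg/util/wait"
        "k8s.io/client-go/dynamic"
    )

    // waitForGone polls until the named object is fully deleted; while it
    // still exists (usually pinned by finalizers, as in the log above) the
    // remaining finalizer list is printed.
    func waitForGone(client dynamic.Interface, gvr schema.GroupVersionResource, name string) error {
        return wait.PollImmediate(100*time.Millisecond, 30*time.Second, func() (bool, error) {
            obj, err := client.Resource(gvr).Get(context.TODO(), name, metav1.GetOptions{})
            if apierrors.IsNotFound(err) {
                return true, nil // fully deleted
            }
            if err != nil {
                return false, err
            }
            fmt.Printf("waiting for %s to be deleted, finalizers: %v\n", name, obj.GetFinalizers())
            return false, nil
        })
    }
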
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions/deletecollection
W1013 22:47:57.578667  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.578693  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.578698  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.578702  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.578707  116659 warnings.go:70] v1/validation/true
W1013 22:47:57.578712  116659 warnings.go:70] v1/validation/false
W1013 22:47:57.578716  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:57.578720  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:57.607060  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.607090  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.607098  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.607103  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.607108  116659 warnings.go:70] v1/validation/false
W1013 22:47:57.607112  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:57.607116  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:57.607121  116659 warnings.go:70] v1/validation/true
W1013 22:47:57.608902  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.608925  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.608930  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.608935  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.608939  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:57.608943  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:57.608948  116659 warnings.go:70] v1/validation/true
W1013 22:47:57.608952  116659 warnings.go:70] v1/validation/false
W1013 22:47:57.688285  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.688313  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.688317  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.688321  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.713205  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:57.713231  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:57.713237  116659 warnings.go:70] v1/mutation/false
W1013 22:47:57.713241  116659 warnings.go:70] v1/mutation/true
W1013 22:47:57.713245  116659 warnings.go:70] v1/validation/false
W1013 22:47:57.713250  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:57.713254  116659 warnings.go:70] v1/validation/true
W1013 22:47:57.713258  116659 warnings.go:70] v1beta1/validation/false
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions/deletecollection (0.43s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions.status (1.23s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions.status/update
W1013 22:47:58.073830  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:58.073863  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:58.073869  116659 warnings.go:70] v1/mutation/false
W1013 22:47:58.073875  116659 warnings.go:70] v1/mutation/true
W1013 22:47:58.073880  116659 warnings.go:70] v1/validation/false
W1013 22:47:58.073884  116659 warnings.go:70] v1/validation/true
W1013 22:47:58.073888  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:58.073893  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:58.397917  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:58.397947  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:58.397956  116659 warnings.go:70] v1/mutation/false
W1013 22:47:58.397960  116659 warnings.go:70] v1/mutation/true
W1013 22:47:58.397965  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:58.397970  116659 warnings.go:70] v1/validation/true
W1013 22:47:58.397974  116659 warnings.go:70] v1/validation/false
W1013 22:47:58.397979  116659 warnings.go:70] v1beta1/validation/false
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions.status/update (0.75s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions.status/patch
I1013 22:47:58.741709  116659 trace.go:205] Trace[694540363]: "GuaranteedUpdate etcd3" type:*apiextensions.CustomResourceDefinition (13-Oct-2021 22:47:58.074) (total time: 666ms):
Trace[694540363]: ---"initial value restored" 320ms (22:47:58.395)
Trace[694540363]: ---"Transaction committed" 322ms (22:47:58.731)
Trace[694540363]: [666.701582ms] [666.701582ms] END
I1013 22:47:58.741854  116659 trace.go:205] Trace[298857855]: "Update" url:/apis/apiextensions.k8s.io/v1/customresourcedefinitions/openshiftwebconsoleconfigs.webconsole2.operator.openshift.io/status,user-agent:admissionwebhook.test/v0.0.0 (linux/amd64) kubernetes/$Format,audit-id:ac05e770-4911-49a7-b014-dd6d67840982,client:127.0.0.1,accept:application/vnd.kubernetes.protobuf, */*,protocol:HTTP/2.0 (13-Oct-2021 22:47:58.074) (total time: 667ms):
Trace[298857855]: [667.059951ms] [667.059951ms] END
W1013 22:47:58.742293  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:58.742311  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:58.742316  116659 warnings.go:70] v1/mutation/false
W1013 22:47:58.742320  116659 warnings.go:70] v1/mutation/true
W1013 22:47:58.742325  116659 warnings.go:70] v1/validation/true
W1013 22:47:58.742329  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:58.742333  116659 warnings.go:70] v1/validation/false
W1013 22:47:58.742338  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:59.195494  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:59.195528  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:59.195540  116659 warnings.go:70] v1/mutation/false
W1013 22:47:59.195545  116659 warnings.go:70] v1/mutation/true
W1013 22:47:59.195549  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:59.195553  116659 warnings.go:70] v1/validation/true
W1013 22:47:59.195558  116659 warnings.go:70] v1/validation/false
W1013 22:47:59.195561  116659 warnings.go:70] v1beta1/validation/true
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apiextensions.k8s.io.v1.customresourcedefinitions.status/patch (0.48s)
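
Note: the trace.go:205 block above records a slow etcd3 GuaranteedUpdate (666ms total, roughly 320ms restoring the initial value and 322ms committing the transaction); the apiserver prints a trace only when it exceeds its threshold. The same instrumentation pattern with k8s.io/utils/trace, with step names taken from the log and the threshold and sleeps illustrative:

    package example

    import (
        "time"

        utiltrace "k8s.io/utils/trace"
    )

    // slowUpdate mimics the tracing behind the log block above: steps are
    // recorded as they complete, and the whole trace is logged only if the
    // total time crosses the threshold passed to LogIfLong.
    func slowUpdate() {
        t := utiltrace.New("GuaranteedUpdate etcd3")
        defer t.LogIfLong(500 * time.Millisecond)

        time.Sleep(320 * time.Millisecond)
        t.Step("initial value restored")

        time.Sleep(320 * time.Millisecond)
        t.Step("Transaction committed")
    }
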
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices (1.04s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices/create (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices/update
W1013 22:47:59.247477  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:59.247505  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:59.247509  116659 warnings.go:70] v1/mutation/false
W1013 22:47:59.247511  116659 warnings.go:70] v1/mutation/true
W1013 22:47:59.247514  116659 warnings.go:70] v1/validation/false
W1013 22:47:59.247517  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:59.247520  116659 warnings.go:70] v1/validation/true
W1013 22:47:59.247526  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:59.311664  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:47:59.311693  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:47:59.311698  116659 warnings.go:70] v1/mutation/false
W1013 22:47:59.311702  116659 warnings.go:70] v1/mutation/true
W1013 22:47:59.311707  116659 warnings.go:70] v1beta1/validation/false
W1013 22:47:59.311712  116659 warnings.go:70] v1/validation/true
W1013 22:47:59.311715  116659 warnings.go:70] v1beta1/validation/true
W1013 22:47:59.311720  116659 warnings.go:70] v1/validation/false
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices/update (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices/patch (0.12s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices/delete (0.51s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices/deletecollection (0.28s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices.status (0.17s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices.status/update (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apiregistration.k8s.io.v1.apiservices.status/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.controllerrevisions
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.controllerrevisions (0.95s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.controllerrevisions/create
I1013 22:48:00.430565  116659 controller.go:611] quota admission added evaluator for: controllerrevisions.apps
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.controllerrevisions/create (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.controllerrevisions/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.controllerrevisions/update (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.controllerrevisions/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.controllerrevisions/patch (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.controllerrevisions/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.controllerrevisions/delete (0.45s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.controllerrevisions/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.controllerrevisions/deletecollection (0.26s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets (0.77s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets/create
I1013 22:48:01.384625  116659 controller.go:611] quota admission added evaluator for: daemonsets.apps
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets/update (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets/patch (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets/delete (0.46s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets/deletecollection (0.15s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets.status (0.24s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets.status/update (0.14s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.daemonsets.status/patch (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments (1.36s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments/create
I1013 22:48:02.398521  116659 controller.go:611] quota admission added evaluator for: deployments.apps
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments/create (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments/update (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments/patch (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments/delete (0.95s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments/deletecollection (0.21s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments.scale
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments.scale (0.26s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments.scale/update
W1013 22:48:03.851225  116659 lease.go:233] Resetting endpoints for master service "kubernetes" to [127.0.0.1]
W1013 22:48:03.859229  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:03.859263  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:03.859270  116659 warnings.go:70] v1/mutation/false
W1013 22:48:03.859274  116659 warnings.go:70] v1/mutation/true
E1013 22:48:03.859334  116659 controller.go:228] unable to sync kubernetes service: Endpoints "kubernetes" is invalid: subsets[0].addresses[0].ip: Invalid value: "127.0.0.1": may not be in the loopback range (127.0.0.0/8, ::1/128)
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments.scale/update (0.16s)
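
Note: the recurring lease.go:233 / controller.go:228 pair is the endpoints reconciler trying to publish the test apiserver's 127.0.0.1 advertise address into the "kubernetes" Endpoints object, which endpoint validation rejects; in this loopback-only integration setup it is expected noise rather than a test failure. The rejected condition is plain loopback-range membership:

    package example

    import "net"

    // isRejectedEndpointIP mirrors the validation outcome in the log above:
    // endpoint addresses may not fall in 127.0.0.0/8 or be ::1.
    func isRejectedEndpointIP(s string) bool {
        ip := net.ParseIP(s)
        return ip != nil && ip.IsLoopback()
    }

    // isRejectedEndpointIP("127.0.0.1") == true, hence the Invalid value error.
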
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments.scale/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments.scale/patch (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments.status (0.24s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments.status/update (0.16s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.deployments.status/patch (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets (1.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets/create
I1013 22:48:04.255746  116659 controller.go:611] quota admission added evaluator for: replicasets.apps
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets/create (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets/update (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets/patch (0.12s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets/delete (0.58s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets/deletecollection (0.26s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets.scale
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets.scale (0.28s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets.scale/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets.scale/update (0.14s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets.scale/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets.scale/patch (0.14s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets.status (0.31s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets.status/update (0.18s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.replicasets.status/patch (0.13s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets (1.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets/create
I1013 22:48:05.955808  116659 controller.go:611] quota admission added evaluator for: statefulsets.apps
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets/update (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets/patch (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets/delete (0.58s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets/deletecollection (0.27s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets.scale
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets.scale (0.19s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets.scale/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets.scale/update (0.12s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets.scale/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets.scale/patch (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets.status (0.21s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets.status/update (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/apps.v1.statefulsets.status/patch (0.12s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/authentication.k8s.io.v1.tokenreviews
    --- PASS: TestWebhookAdmissionWithoutWatchCache/authentication.k8s.io.v1.tokenreviews (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/authentication.k8s.io.v1.tokenreviews/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/authentication.k8s.io.v1.tokenreviews/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.localsubjectaccessreviews
    --- PASS: TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.localsubjectaccessreviews (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.localsubjectaccessreviews/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.localsubjectaccessreviews/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.selfsubjectaccessreviews
    --- PASS: TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.selfsubjectaccessreviews (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.selfsubjectaccessreviews/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.selfsubjectaccessreviews/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.selfsubjectrulesreviews
    --- PASS: TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.selfsubjectrulesreviews (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.selfsubjectrulesreviews/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.selfsubjectrulesreviews/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.subjectaccessreviews
    --- PASS: TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.subjectaccessreviews (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.subjectaccessreviews/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/authorization.k8s.io.v1.subjectaccessreviews/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers
    --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers (0.73s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers/create
I1013 22:48:07.449312  116659 controller.go:611] quota admission added evaluator for: horizontalpodautoscalers.autoscaling
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers/update (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers/patch (0.12s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers/delete (0.34s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers/deletecollection (0.15s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers.status (0.22s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers.status/update (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v1.horizontalpodautoscalers.status/patch (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers
    --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers (0.99s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers/update (0.13s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers/delete (0.58s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers/deletecollection (0.20s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers.status (0.23s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers.status/update (0.14s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta1.horizontalpodautoscalers.status/patch (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers
    --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers (0.95s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers/update (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers/patch (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers/delete (0.59s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers/deletecollection (0.18s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers.status (0.22s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers.status/update (0.18s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/autoscaling.v2beta2.horizontalpodautoscalers.status/patch (0.04s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas
    --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas (0.86s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas/create (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas/update (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas/patch (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas/delete (0.46s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas/deletecollection (0.19s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas.scale
    --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas.scale (0.28s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas.scale/update
E1013 22:48:11.761301  116659 fieldmanager.go:204] "[SHOULD NOT HAPPEN] failed to update managedFields" VersionKind="autoscaling/v1, Kind=Scale" namespace="" name="cr3panda"
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas.scale/update (0.15s)
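
Note: the fieldmanager.go:204 "[SHOULD NOT HAPPEN] failed to update managedFields" error is logged when server-side field management cannot reconcile managedFields for the autoscaling/v1 Scale view of a custom resource; the surrounding subtests still pass, so here it is logged noise on the scale path rather than a failure. A sketch of the kind of request involved — a scale-subresource update through the dynamic client, with the client, GVR, and name as placeholders (the pandas CRD is cluster-scoped, hence no namespace):

    package example

    import (
        "context"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
        "k8s.io/apimachinery/pkg/runtime/schema"
        "k8s.io/client-go/dynamic"
    )

    // updateScale writes the scale subresource of a cluster-scoped custom
    // resource; an update like this is what the fieldmanager error above
    // was logged for.
    func updateScale(client dynamic.Interface, gvr schema.GroupVersionResource, name string, replicas int64) error {
        scale, err := client.Resource(gvr).Get(context.TODO(), name, metav1.GetOptions{}, "scale")
        if err != nil {
            return err
        }
        if err := unstructured.SetNestedField(scale.Object, replicas, "spec", "replicas"); err != nil {
            return err
        }
        _, err = client.Resource(gvr).Update(context.TODO(), scale, metav1.UpdateOptions{}, "scale")
        return err
    }
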
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas.scale/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas.scale/patch (0.13s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas.status (0.23s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas.status/update (0.16s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v1.pandas.status/patch (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas
    --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas (1.25s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas/create (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas/update (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas/patch (0.13s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas/delete (0.74s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas/deletecollection (0.24s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas.scale
    --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas.scale (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas.scale/update
E1013 22:48:13.436737  116659 fieldmanager.go:204] "[SHOULD NOT HAPPEN] failed to update managedFields" VersionKind="autoscaling/v1, Kind=Scale" namespace="" name="cr4panda"
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas.scale/update (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas.scale/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas.scale/patch (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas.status (0.14s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas.status/update (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/awesome.bears.com.v3.pandas.status/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs
    --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs (0.88s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs/create
I1013 22:48:13.635747  116659 controller.go:611] quota admission added evaluator for: cronjobs.batch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs/create (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs/update (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs/patch (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs/delete
W1013 22:48:13.858217  116659 lease.go:233] Resetting endpoints for master service "kubernetes" to [127.0.0.1]
W1013 22:48:13.864620  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:13.864638  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:13.864642  116659 warnings.go:70] v1/mutation/false
W1013 22:48:13.864645  116659 warnings.go:70] v1/mutation/true
E1013 22:48:13.864693  116659 controller.go:228] unable to sync kubernetes service: Endpoints "kubernetes" is invalid: subsets[0].addresses[0].ip: Invalid value: "127.0.0.1": may not be in the loopback range (127.0.0.0/8, ::1/128)
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs/delete (0.46s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs/deletecollection (0.20s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs.status (0.22s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs.status/update (0.13s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.cronjobs.status/patch (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs
    --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs (0.77s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs/create
I1013 22:48:14.739739  116659 controller.go:611] quota admission added evaluator for: jobs.batch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs/update (0.12s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs/patch (0.04s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs/delete (0.39s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs/deletecollection (0.20s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs.status (0.27s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs.status/update (0.18s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1.jobs.status/patch (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs
    --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs (0.89s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs/update (0.13s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs/patch (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs/delete (0.43s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs/deletecollection (0.24s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs.status (0.30s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs.status/update (0.19s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/batch.v1beta1.cronjobs.status/patch (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests
    --- PASS: TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests (0.82s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests/update (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests/delete (0.41s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests/deletecollection (0.27s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests.approval
    --- PASS: TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests.approval (0.26s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests.approval/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests.approval/update (0.21s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests.approval/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests.approval/patch (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests.status (0.21s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests.status/update (0.15s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/certificates.k8s.io.v1.certificatesigningrequests.status/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/coordination.k8s.io.v1.leases
    --- PASS: TestWebhookAdmissionWithoutWatchCache/coordination.k8s.io.v1.leases (0.57s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/coordination.k8s.io.v1.leases/create
I1013 22:48:18.264571  116659 controller.go:611] quota admission added evaluator for: leases.coordination.k8s.io
        --- PASS: TestWebhookAdmissionWithoutWatchCache/coordination.k8s.io.v1.leases/create (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/coordination.k8s.io.v1.leases/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/coordination.k8s.io.v1.leases/update (0.04s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/coordination.k8s.io.v1.leases/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/coordination.k8s.io.v1.leases/patch (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/coordination.k8s.io.v1.leases/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/coordination.k8s.io.v1.leases/delete (0.25s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/coordination.k8s.io.v1.leases/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/coordination.k8s.io.v1.leases/deletecollection (0.20s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/cr.bar.com.v1.foos
    --- PASS: TestWebhookAdmissionWithoutWatchCache/cr.bar.com.v1.foos (0.84s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/cr.bar.com.v1.foos/create
I1013 22:48:19.016151  116659 controller.go:611] quota admission added evaluator for: foos.cr.bar.com
        --- PASS: TestWebhookAdmissionWithoutWatchCache/cr.bar.com.v1.foos/create (0.19s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/cr.bar.com.v1.foos/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/cr.bar.com.v1.foos/update (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/cr.bar.com.v1.foos/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/cr.bar.com.v1.foos/patch (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/cr.bar.com.v1.foos/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/cr.bar.com.v1.foos/delete (0.32s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/cr.bar.com.v1.foos/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/cr.bar.com.v1.foos/deletecollection (0.17s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/custom.fancy.com.v2.pants
    --- PASS: TestWebhookAdmissionWithoutWatchCache/custom.fancy.com.v2.pants (0.75s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/custom.fancy.com.v2.pants/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/custom.fancy.com.v2.pants/create (0.12s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/custom.fancy.com.v2.pants/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/custom.fancy.com.v2.pants/update (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/custom.fancy.com.v2.pants/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/custom.fancy.com.v2.pants/patch (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/custom.fancy.com.v2.pants/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/custom.fancy.com.v2.pants/delete (0.34s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/custom.fancy.com.v2.pants/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/custom.fancy.com.v2.pants/deletecollection (0.17s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1.endpointslices
    --- PASS: TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1.endpointslices (0.69s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1.endpointslices/create
I1013 22:48:20.426091  116659 controller.go:611] quota admission added evaluator for: endpointslices.discovery.k8s.io
        --- PASS: TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1.endpointslices/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1.endpointslices/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1.endpointslices/update (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1.endpointslices/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1.endpointslices/patch (0.04s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1.endpointslices/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1.endpointslices/delete (0.40s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1.endpointslices/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1.endpointslices/deletecollection (0.15s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1beta1.endpointslices
    --- PASS: TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1beta1.endpointslices (0.89s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1beta1.endpointslices/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1beta1.endpointslices/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1beta1.endpointslices/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1beta1.endpointslices/update (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1beta1.endpointslices/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1beta1.endpointslices/patch (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1beta1.endpointslices/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1beta1.endpointslices/delete (0.53s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1beta1.endpointslices/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/discovery.k8s.io.v1beta1.endpointslices/deletecollection (0.21s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1.events
    --- PASS: TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1.events (0.89s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1.events/create
I1013 22:48:22.004374  116659 controller.go:611] quota admission added evaluator for: events.events.k8s.io
        --- PASS: TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1.events/create (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1.events/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1.events/update (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1.events/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1.events/patch (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1.events/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1.events/delete (0.43s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1.events/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1.events/deletecollection (0.25s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1beta1.events
    --- PASS: TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1beta1.events (1.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1beta1.events/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1beta1.events/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1beta1.events/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1beta1.events/update (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1beta1.events/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1beta1.events/patch (0.16s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1beta1.events/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1beta1.events/delete (0.52s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1beta1.events/deletecollection
W1013 22:48:23.929184  116659 lease.go:233] Resetting endpoints for master service "kubernetes" to [127.0.0.1]
W1013 22:48:23.935694  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:23.935726  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:23.935732  116659 warnings.go:70] v1/mutation/false
W1013 22:48:23.935737  116659 warnings.go:70] v1/mutation/true
E1013 22:48:23.935788  116659 controller.go:228] unable to sync kubernetes service: Endpoints "kubernetes" is invalid: subsets[0].addresses[0].ip: Invalid value: "127.0.0.1": may not be in the loopback range (127.0.0.0/8, ::1/128)
        --- PASS: TestWebhookAdmissionWithoutWatchCache/events.k8s.io.v1beta1.events/deletecollection (0.29s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas
    --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas (1.21s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas/update
W1013 22:48:24.083516  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:24.083535  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:24.083540  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:24.083544  116659 warnings.go:70] v1/mutation/false
W1013 22:48:24.083548  116659 warnings.go:70] v1/mutation/true
W1013 22:48:24.083553  116659 warnings.go:70] v1beta1/validation/true
W1013 22:48:24.083557  116659 warnings.go:70] v1beta1/validation/false
W1013 22:48:24.083562  116659 warnings.go:70] v1/validation/false
W1013 22:48:24.083566  116659 warnings.go:70] v1/validation/true
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas/update (0.23s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas/delete
W1013 22:48:24.570076  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:24.570098  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:24.570102  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:24.570105  116659 warnings.go:70] v1/mutation/false
W1013 22:48:24.570107  116659 warnings.go:70] v1/mutation/true
W1013 22:48:24.570114  116659 warnings.go:70] v1beta1/validation/false
W1013 22:48:24.570118  116659 warnings.go:70] v1beta1/validation/true
W1013 22:48:24.570121  116659 warnings.go:70] v1/validation/false
W1013 22:48:24.570123  116659 warnings.go:70] v1/validation/true
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas/delete (0.52s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas/deletecollection
W1013 22:48:24.839561  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:24.839579  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:24.839598  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:24.839602  116659 warnings.go:70] v1/mutation/false
W1013 22:48:24.839607  116659 warnings.go:70] v1/mutation/true
W1013 22:48:24.839616  116659 warnings.go:70] v1/validation/true
W1013 22:48:24.839621  116659 warnings.go:70] v1beta1/validation/true
W1013 22:48:24.839625  116659 warnings.go:70] v1beta1/validation/false
W1013 22:48:24.839631  116659 warnings.go:70] v1/validation/false
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas/deletecollection (0.37s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas.status (0.26s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas.status/update
W1013 22:48:25.265478  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:25.265504  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:25.265509  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:25.265514  116659 warnings.go:70] v1/mutation/false
W1013 22:48:25.265518  116659 warnings.go:70] v1/mutation/true
W1013 22:48:25.265523  116659 warnings.go:70] v1/validation/false
W1013 22:48:25.265527  116659 warnings.go:70] v1beta1/validation/true
W1013 22:48:25.265532  116659 warnings.go:70] v1beta1/validation/false
W1013 22:48:25.265536  116659 warnings.go:70] v1/validation/true
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas.status/update (0.22s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.flowschemas.status/patch (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations
    --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations (0.83s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations/update (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations/patch (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations/delete (0.43s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations/deletecollection (0.27s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations.status (0.16s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations.status/update (0.12s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1alpha1.prioritylevelconfigurations.status/patch (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas
    --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas (1.12s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas/update
W1013 22:48:26.529586  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:26.529613  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:26.529618  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:26.529623  116659 warnings.go:70] v1/mutation/false
W1013 22:48:26.529628  116659 warnings.go:70] v1/mutation/true
W1013 22:48:26.529632  116659 warnings.go:70] v1beta1/validation/true
W1013 22:48:26.529638  116659 warnings.go:70] v1beta1/validation/false
W1013 22:48:26.529642  116659 warnings.go:70] v1/validation/true
W1013 22:48:26.529647  116659 warnings.go:70] v1/validation/false
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas/update (0.23s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas/patch (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas/delete
W1013 22:48:27.031561  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:27.031587  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:27.031592  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:27.031600  116659 warnings.go:70] v1/mutation/false
W1013 22:48:27.031605  116659 warnings.go:70] v1/mutation/true
W1013 22:48:27.031609  116659 warnings.go:70] v1beta1/validation/false
W1013 22:48:27.031614  116659 warnings.go:70] v1/validation/true
W1013 22:48:27.031618  116659 warnings.go:70] v1beta1/validation/true
W1013 22:48:27.031622  116659 warnings.go:70] v1/validation/false
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas/delete (0.43s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas/deletecollection
2021/10/13 22:48:27 http: TLS handshake error from 127.0.0.1:50094: EOF
2021/10/13 22:48:27 http: TLS handshake error from 127.0.0.1:50098: EOF
2021/10/13 22:48:27 http: TLS handshake error from 127.0.0.1:50096: EOF
W1013 22:48:27.390655  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:27.390682  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:27.390686  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:27.390690  116659 warnings.go:70] v1/mutation/false
W1013 22:48:27.390695  116659 warnings.go:70] v1/mutation/true
W1013 22:48:27.390699  116659 warnings.go:70] v1beta1/validation/false
W1013 22:48:27.390704  116659 warnings.go:70] v1/validation/true
W1013 22:48:27.390708  116659 warnings.go:70] v1beta1/validation/true
W1013 22:48:27.390713  116659 warnings.go:70] v1/validation/false
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas/deletecollection (0.34s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas.status (0.46s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas.status/update
W1013 22:48:27.675571  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:27.675601  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:27.675607  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:27.675611  116659 warnings.go:70] v1/mutation/false
W1013 22:48:27.675615  116659 warnings.go:70] v1/mutation/true
W1013 22:48:27.675618  116659 warnings.go:70] v1beta1/validation/false
W1013 22:48:27.675622  116659 warnings.go:70] v1beta1/validation/true
W1013 22:48:27.675627  116659 warnings.go:70] v1/validation/true
W1013 22:48:27.675631  116659 warnings.go:70] v1/validation/false
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas.status/update (0.41s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.flowschemas.status/patch (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations
    --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations (0.67s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations/update (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations/delete (0.39s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations/deletecollection (0.12s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations.status (0.17s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations.status/update (0.13s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta1.prioritylevelconfigurations.status/patch (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas
    --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas (1.14s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas/create (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas/update
W1013 22:48:28.949334  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:28.949368  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:28.949373  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:28.949378  116659 warnings.go:70] v1/mutation/false
W1013 22:48:28.949382  116659 warnings.go:70] v1/mutation/true
W1013 22:48:28.949417  116659 warnings.go:70] v1beta1/validation/true
W1013 22:48:28.949422  116659 warnings.go:70] v1/validation/false
W1013 22:48:28.949426  116659 warnings.go:70] v1beta1/validation/false
W1013 22:48:28.949430  116659 warnings.go:70] v1/validation/true
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas/update (0.17s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas/delete
W1013 22:48:29.406327  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:29.406355  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:29.406360  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:29.406365  116659 warnings.go:70] v1/mutation/false
W1013 22:48:29.406369  116659 warnings.go:70] v1/mutation/true
W1013 22:48:29.406379  116659 warnings.go:70] v1beta1/validation/true
W1013 22:48:29.406383  116659 warnings.go:70] v1beta1/validation/false
W1013 22:48:29.406388  116659 warnings.go:70] v1/validation/false
W1013 22:48:29.406392  116659 warnings.go:70] v1/validation/true
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas/delete (0.52s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas/deletecollection
W1013 22:48:29.794709  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:29.794737  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:29.794742  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:29.794747  116659 warnings.go:70] v1/mutation/false
W1013 22:48:29.794751  116659 warnings.go:70] v1/mutation/true
W1013 22:48:29.794756  116659 warnings.go:70] v1/validation/false
W1013 22:48:29.794760  116659 warnings.go:70] v1beta1/validation/false
W1013 22:48:29.794764  116659 warnings.go:70] v1beta1/validation/true
W1013 22:48:29.794769  116659 warnings.go:70] v1/validation/true
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas/deletecollection (0.34s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas.status (0.16s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas.status/update (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas.status/patch
W1013 22:48:30.108974  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:30.109000  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:30.109004  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:30.109009  116659 warnings.go:70] v1/mutation/false
W1013 22:48:30.109017  116659 warnings.go:70] v1/mutation/true
W1013 22:48:30.109021  116659 warnings.go:70] v1/validation/true
W1013 22:48:30.109025  116659 warnings.go:70] v1beta1/validation/false
W1013 22:48:30.109029  116659 warnings.go:70] v1/validation/false
W1013 22:48:30.109034  116659 warnings.go:70] v1beta1/validation/true
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.flowschemas.status/patch (0.04s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations
    --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations (0.79s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations/update (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations/patch (0.14s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations/delete (0.36s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations/deletecollection (0.18s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations.status (0.23s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations.status/update (0.13s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/flowcontrol.apiserver.k8s.io.v1beta2.prioritylevelconfigurations.status/patch (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions
    --- PASS: TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions (0.95s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions/update (0.12s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions/patch (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions/delete (0.48s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions/deletecollection (0.26s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions.status (0.14s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions.status/update (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/internal.apiserver.k8s.io.v1alpha1.storageversions.status/patch (0.04s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingressclasses
    --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingressclasses (1.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingressclasses/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingressclasses/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingressclasses/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingressclasses/update (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingressclasses/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingressclasses/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingressclasses/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingressclasses/delete (0.63s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingressclasses/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingressclasses/deletecollection (0.21s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses
    --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses (0.87s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses/create
I1013 22:48:33.274637  116659 controller.go:611] quota admission added evaluator for: ingresses.networking.k8s.io
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses/update (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses/patch (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses/delete (0.43s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses/deletecollection
W1013 22:48:33.889659  116659 lease.go:233] Resetting endpoints for master service "kubernetes" to [127.0.0.1]
W1013 22:48:33.895647  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:33.895672  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:33.895676  116659 warnings.go:70] v1/mutation/false
W1013 22:48:33.895679  116659 warnings.go:70] v1/mutation/true
E1013 22:48:33.895719  116659 controller.go:228] unable to sync kubernetes service: Endpoints "kubernetes" is invalid: subsets[0].addresses[0].ip: Invalid value: "127.0.0.1": may not be in the loopback range (127.0.0.0/8, ::1/128)
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses/deletecollection (0.27s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses.status (0.25s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses.status/update
W1013 22:48:34.244894  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:34.276815  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses.status/update (0.16s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses.status/patch
W1013 22:48:34.306023  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:34.334160  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:34.388615  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.ingresses.status/patch (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.networkpolicies
    --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.networkpolicies (0.89s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.networkpolicies/create
W1013 22:48:34.392064  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
I1013 22:48:34.396640  116659 controller.go:611] quota admission added evaluator for: networkpolicies.networking.k8s.io
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.networkpolicies/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.networkpolicies/update
W1013 22:48:34.422219  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:34.451920  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:34.477905  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.networkpolicies/update (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.networkpolicies/patch
W1013 22:48:34.574562  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:34.614729  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.networkpolicies/patch (0.16s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.networkpolicies/delete
W1013 22:48:34.664395  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:34.710617  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:34.750299  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:34.776823  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:34.800018  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:34.850313  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:34.878305  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:34.909765  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:34.929159  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:34.995434  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 FlowSchema is deprecated in v1.26+, unavailable in v1.29+
W1013 22:48:35.019569  116659 warnings.go:70] flowcontrol.apiserver.k8s.io/v1beta2 PriorityLevelConfiguration is deprecated in v1.26+, unavailable in v1.29+
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.networkpolicies/delete (0.49s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.networkpolicies/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/networking.k8s.io.v1.networkpolicies/deletecollection (0.14s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1.runtimeclasses
    --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1.runtimeclasses (0.83s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1.runtimeclasses/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1.runtimeclasses/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1.runtimeclasses/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1.runtimeclasses/update (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1.runtimeclasses/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1.runtimeclasses/patch (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1.runtimeclasses/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1.runtimeclasses/delete (0.46s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1.runtimeclasses/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1.runtimeclasses/deletecollection (0.20s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1alpha1.runtimeclasses
    --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1alpha1.runtimeclasses (0.85s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1alpha1.runtimeclasses/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1alpha1.runtimeclasses/create (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1alpha1.runtimeclasses/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1alpha1.runtimeclasses/update (0.04s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1alpha1.runtimeclasses/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1alpha1.runtimeclasses/patch (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1alpha1.runtimeclasses/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1alpha1.runtimeclasses/delete (0.38s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1alpha1.runtimeclasses/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1alpha1.runtimeclasses/deletecollection (0.27s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1beta1.runtimeclasses
    --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1beta1.runtimeclasses (0.89s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1beta1.runtimeclasses/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1beta1.runtimeclasses/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1beta1.runtimeclasses/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1beta1.runtimeclasses/update (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1beta1.runtimeclasses/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1beta1.runtimeclasses/patch (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1beta1.runtimeclasses/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1beta1.runtimeclasses/delete (0.53s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1beta1.runtimeclasses/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/node.k8s.io.v1beta1.runtimeclasses/deletecollection (0.18s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets
    --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets (0.82s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets/create
I1013 22:48:37.857269  116659 controller.go:611] quota admission added evaluator for: poddisruptionbudgets.policy
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets/update (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets/patch (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets/delete (0.42s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets/deletecollection (0.17s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets.status (0.20s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets.status/update (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1.poddisruptionbudgets.status/patch (0.12s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets
    --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets (0.71s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets/update (0.13s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets/delete (0.34s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets/deletecollection (0.17s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets.status (0.14s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets.status/update (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.poddisruptionbudgets.status/patch (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.podsecuritypolicies
    --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.podsecuritypolicies (0.79s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.podsecuritypolicies/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.podsecuritypolicies/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.podsecuritypolicies/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.podsecuritypolicies/update (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.podsecuritypolicies/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.podsecuritypolicies/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.podsecuritypolicies/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.podsecuritypolicies/delete (0.39s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.podsecuritypolicies/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/policy.v1beta1.podsecuritypolicies/deletecollection (0.25s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/random.numbers.com.v1.integers
    --- PASS: TestWebhookAdmissionWithoutWatchCache/random.numbers.com.v1.integers (0.96s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/random.numbers.com.v1.integers/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/random.numbers.com.v1.integers/create (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/random.numbers.com.v1.integers/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/random.numbers.com.v1.integers/update (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/random.numbers.com.v1.integers/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/random.numbers.com.v1.integers/patch (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/random.numbers.com.v1.integers/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/random.numbers.com.v1.integers/delete (0.49s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/random.numbers.com.v1.integers/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/random.numbers.com.v1.integers/deletecollection (0.20s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterrolebindings
    --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterrolebindings (0.72s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterrolebindings/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterrolebindings/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterrolebindings/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterrolebindings/update (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterrolebindings/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterrolebindings/patch (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterrolebindings/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterrolebindings/delete (0.41s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterrolebindings/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterrolebindings/deletecollection (0.20s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterroles
    --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterroles (0.94s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterroles/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterroles/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterroles/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterroles/update (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterroles/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterroles/patch (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterroles/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterroles/delete (0.52s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterroles/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.clusterroles/deletecollection (0.25s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.rolebindings
    --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.rolebindings (0.89s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.rolebindings/create
I1013 22:48:43.134109  116659 controller.go:611] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.rolebindings/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.rolebindings/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.rolebindings/update (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.rolebindings/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.rolebindings/patch (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.rolebindings/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.rolebindings/delete (0.49s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.rolebindings/deletecollection
W1013 22:48:43.835916  116659 lease.go:233] Resetting endpoints for master service "kubernetes" to [127.0.0.1]
W1013 22:48:43.843188  116659 warnings.go:70] v1beta1/mutation/false
W1013 22:48:43.843284  116659 warnings.go:70] v1beta1/mutation/true
W1013 22:48:43.843335  116659 warnings.go:70] v1/mutation/false
W1013 22:48:43.843352  116659 warnings.go:70] v1/mutation/true
E1013 22:48:43.843409  116659 controller.go:228] unable to sync kubernetes service: Endpoints "kubernetes" is invalid: subsets[0].addresses[0].ip: Invalid value: "127.0.0.1": may not be in the loopback range (127.0.0.0/8, ::1/128)
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.rolebindings/deletecollection (0.25s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.roles
    --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.roles (0.89s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.roles/create
I1013 22:48:44.028038  116659 controller.go:611] quota admission added evaluator for: roles.rbac.authorization.k8s.io
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.roles/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.roles/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.roles/update (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.roles/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.roles/patch (0.14s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.roles/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.roles/delete (0.36s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.roles/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/rbac.authorization.k8s.io.v1.roles/deletecollection (0.26s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/scheduling.k8s.io.v1.priorityclasses
    --- PASS: TestWebhookAdmissionWithoutWatchCache/scheduling.k8s.io.v1.priorityclasses (0.74s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/scheduling.k8s.io.v1.priorityclasses/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/scheduling.k8s.io.v1.priorityclasses/create (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/scheduling.k8s.io.v1.priorityclasses/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/scheduling.k8s.io.v1.priorityclasses/update (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/scheduling.k8s.io.v1.priorityclasses/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/scheduling.k8s.io.v1.priorityclasses/patch (0.09s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/scheduling.k8s.io.v1.priorityclasses/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/scheduling.k8s.io.v1.priorityclasses/delete (0.35s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/scheduling.k8s.io.v1.priorityclasses/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/scheduling.k8s.io.v1.priorityclasses/deletecollection (0.14s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csidrivers
    --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csidrivers (0.66s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csidrivers/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csidrivers/create (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csidrivers/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csidrivers/update (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csidrivers/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csidrivers/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csidrivers/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csidrivers/delete (0.30s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csidrivers/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csidrivers/deletecollection (0.17s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csinodes
    --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csinodes (0.73s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csinodes/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csinodes/create (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csinodes/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csinodes/update (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csinodes/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csinodes/patch (0.10s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csinodes/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csinodes/delete (0.35s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csinodes/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csinodes/deletecollection (0.13s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.storageclasses
    --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.storageclasses (0.43s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.storageclasses/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.storageclasses/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.storageclasses/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.storageclasses/update (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.storageclasses/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.storageclasses/patch (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.storageclasses/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.storageclasses/delete (0.20s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.storageclasses/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.storageclasses/deletecollection (0.13s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments
    --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments (0.69s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments/update (0.08s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments/patch (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments/delete (0.42s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments/deletecollection (0.15s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments.status
    --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments.status (0.12s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments.status/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments.status/update (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments.status/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.volumeattachments.status/patch (0.07s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1alpha1.csistoragecapacities
    --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1alpha1.csistoragecapacities (0.72s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1alpha1.csistoragecapacities/create
I1013 22:48:48.289364  116659 controller.go:611] quota admission added evaluator for: csistoragecapacities.storage.k8s.io
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1alpha1.csistoragecapacities/create (0.02s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1alpha1.csistoragecapacities/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1alpha1.csistoragecapacities/update (0.05s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1alpha1.csistoragecapacities/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1alpha1.csistoragecapacities/patch (0.06s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1alpha1.csistoragecapacities/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1alpha1.csistoragecapacities/delete (0.33s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1alpha1.csistoragecapacities/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1alpha1.csistoragecapacities/deletecollection (0.26s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1beta1.csistoragecapacities
    --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1beta1.csistoragecapacities (0.81s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1beta1.csistoragecapacities/create
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1beta1.csistoragecapacities/create (0.01s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1beta1.csistoragecapacities/update
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1beta1.csistoragecapacities/update (0.03s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1beta1.csistoragecapacities/patch
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1beta1.csistoragecapacities/patch (0.11s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1beta1.csistoragecapacities/delete
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1beta1.csistoragecapacities/delete (0.44s)
=== RUN   TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1beta1.csistoragecapacities/deletecollection
        --- PASS: TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1beta1.csistoragecapacities/deletecollection (0.21s)
I1013 22:48:49.807138  116659 apiapproval_controller.go:198] Shutting down KubernetesAPIApprovalPolicyConformantConditionController
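Go subtest names are slash-separated, so any single resource/verb case above can be replayed in isolation by passing the full path to -run; a sketch (the repo-root-relative package path is an assumption about the checkout):

    go test -v ./test/integration/apiserver/admissionwebhook \
      -run 'TestWebhookAdmissionWithoutWatchCache/storage.k8s.io.v1.csidrivers/create'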

				from junit_20211013-224333.xml


Error lines from build-log.txt

... skipping 76 lines ...
Recording: record_command_canary
Running command: record_command_canary

+++ Running case: test-cmd.record_command_canary 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: record_command_canary
/home/prow/go/src/k8s.io/kubernetes/test/cmd/legacy-script.sh: line 157: bogus-expected-to-fail: command not found
!!! [1013 22:30:10] Call tree:
!!! [1013 22:30:10]  1: /home/prow/go/src/k8s.io/kubernetes/test/cmd/../../third_party/forked/shell2junit/sh2ju.sh:47 record_command_canary(...)
!!! [1013 22:30:10]  2: /home/prow/go/src/k8s.io/kubernetes/test/cmd/../../third_party/forked/shell2junit/sh2ju.sh:112 eVal(...)
!!! [1013 22:30:10]  3: /home/prow/go/src/k8s.io/kubernetes/test/cmd/legacy-script.sh:133 juLog(...)
!!! [1013 22:30:10]  4: /home/prow/go/src/k8s.io/kubernetes/test/cmd/legacy-script.sh:161 record_command(...)
!!! [1013 22:30:10]  5: hack/make-rules/test-cmd.sh:35 source(...)
+++ exit code: 1
+++ error: 1
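The failure above is the harness's self-test: record_command_canary deliberately invokes a command that cannot exist, proving that the shell2junit wrappers (sh2ju.sh's juLog/eVal, visible in the call tree) record non-zero exits. A minimal sketch of the pattern; the function name and failing command are taken from the log, the wrapper call is hypothetical:

    record_command_canary() {
      # intentionally not a real command: the harness must capture this failure
      bogus-expected-to-fail
    }
    record_command record_command_canary   # hypothetical harness invocation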
+++ [1013 22:30:10] Running kubeadm tests
+++ [1013 22:30:14] Building go targets for linux/amd64:
    cmd/kubeadm
> static build CGO_ENABLED=0: k8s.io/kubernetes/cmd/kubeadm
+++ [1013 22:31:13] Running tests without code coverage 
{"Time":"2021-10-13T22:31:57.811904679Z","Action":"output","Package":"k8s.io/kubernetes/cmd/kubeadm/test/cmd","Output":"ok  \tk8s.io/kubernetes/cmd/kubeadm/test/cmd\t41.630s\n"}
... skipping 202 lines ...
+++ [1013 22:34:36] Building go targets for linux/amd64:
    cmd/kube-controller-manager
> static build CGO_ENABLED=0: k8s.io/kubernetes/cmd/kube-controller-manager
+++ [1013 22:35:09] Generate kubeconfig for controller-manager
+++ [1013 22:35:09] Starting controller-manager
I1013 22:35:10.139642   56856 serving.go:348] Generated self-signed cert in-memory
W1013 22:35:10.943177   56856 authentication.go:419] failed to read in-cluster kubeconfig for delegated authentication: open /var/run/secrets/kubernetes.io/serviceaccount/token: no such file or directory
W1013 22:35:10.943234   56856 authentication.go:316] No authentication-kubeconfig provided in order to lookup client-ca-file in configmap/extension-apiserver-authentication in kube-system, so client certificate authentication won't work.
W1013 22:35:10.943244   56856 authentication.go:340] No authentication-kubeconfig provided in order to lookup requestheader-client-ca-file in configmap/extension-apiserver-authentication in kube-system, so request-header client certificate authentication won't work.
W1013 22:35:10.943262   56856 authorization.go:225] failed to read in-cluster kubeconfig for delegated authorization: open /var/run/secrets/kubernetes.io/serviceaccount/token: no such file or directory
W1013 22:35:10.943285   56856 authorization.go:193] No authorization-kubeconfig provided, so SubjectAccessReview of authorization tokens won't work.
I1013 22:35:10.943322   56856 controllermanager.go:188] Version: v1.23.0-alpha.3.306+585c88eb439bc9
I1013 22:35:10.945277   56856 secure_serving.go:200] Serving securely on [::]:10257
I1013 22:35:10.945370   56856 tlsconfig.go:240] "Starting DynamicServingCertificateController"
I1013 22:35:10.945691   56856 leaderelection.go:248] attempting to acquire leader lease kube-system/kube-controller-manager...
I1013 22:35:10.963356   53334 controller.go:611] quota admission added evaluator for: leases.coordination.k8s.io
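The lease being acquired is an ordinary coordination.k8s.io object (hence the quota evaluator line just above); against a live cluster the current holder can be read directly:

    kubectl -n kube-system get lease kube-controller-manager -o jsonpath='{.spec.holderIdentity}'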
... skipping 10 lines ...
I1013 22:35:11.043881   56856 shared_informer.go:240] Waiting for caches to sync for service account
I1013 22:35:11.044372   56856 garbagecollector.go:146] Starting garbage collector controller
I1013 22:35:11.044398   56856 shared_informer.go:240] Waiting for caches to sync for garbage collector
I1013 22:35:11.044426   56856 graph_builder.go:289] GraphBuilder running
I1013 22:35:11.044475   56856 controllermanager.go:597] Started "garbagecollector"
I1013 22:35:11.044761   56856 node_lifecycle_controller.go:76] Sending events to api server
E1013 22:35:11.044843   56856 core.go:211] failed to start cloud node lifecycle controller: no cloud provider provided
W1013 22:35:11.044856   56856 controllermanager.go:575] Skipping "cloud-node-lifecycle"
W1013 22:35:11.045284   56856 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
I1013 22:35:11.045360   56856 controllermanager.go:597] Started "ttl-after-finished"
W1013 22:35:11.045670   56856 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:35:11.045728   56856 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
W1013 22:35:11.045749   56856 mutation_detector.go:53] Mutation detector is enabled, this will result in memory leakage.
... skipping 109 lines ...
I1013 22:35:11.069426   56856 controllermanager.go:597] Started "disruption"
I1013 22:35:11.069752   56856 disruption.go:363] Starting disruption controller
I1013 22:35:11.069789   56856 shared_informer.go:240] Waiting for caches to sync for disruption
I1013 22:35:11.070118   56856 controllermanager.go:597] Started "horizontalpodautoscaling"
I1013 22:35:11.070513   56856 horizontal.go:169] Starting HPA controller
I1013 22:35:11.070630   56856 shared_informer.go:240] Waiting for caches to sync for HPA
E1013 22:35:11.070839   56856 core.go:92] Failed to start service controller: WARNING: no cloud provider provided, services of type LoadBalancer will fail
W1013 22:35:11.070873   56856 controllermanager.go:575] Skipping "service"
I1013 22:35:11.071203   56856 controllermanager.go:597] Started "pv-protection"
I1013 22:35:11.071284   56856 pv_protection_controller.go:83] Starting PV protection controller
I1013 22:35:11.071300   56856 shared_informer.go:240] Waiting for caches to sync for PV protection
I1013 22:35:11.072415   56856 controllermanager.go:597] Started "replicationcontroller"
I1013 22:35:11.072588   56856 replica_set.go:186] Starting replicationcontroller controller
... skipping 66 lines ...
I1013 22:35:11.467800   56856 shared_informer.go:247] Caches are synced for resource quota 
I1013 22:35:11.479404   56856 shared_informer.go:247] Caches are synced for resource quota 
I1013 22:35:11.943419   56856 shared_informer.go:247] Caches are synced for garbage collector 
I1013 22:35:11.944616   56856 shared_informer.go:247] Caches are synced for garbage collector 
I1013 22:35:11.944647   56856 garbagecollector.go:155] Garbage collector: all resource monitors have synced. Proceeding to collect garbage
node/127.0.0.1 created
W1013 22:35:13.361386   56856 actual_state_of_world.go:534] Failed to update statusUpdateNeeded field in actual state of world: Failed to set statusUpdateNeeded to needed true, because nodeName="127.0.0.1" does not exist
+++ [1013 22:35:13] Checking kubectl version
Client Version: version.Info{Major:"1", Minor:"23+", GitVersion:"v1.23.0-alpha.3.306+585c88eb439bc9", GitCommit:"585c88eb439bc9743aa375583a5faced16e37a38", GitTreeState:"clean", BuildDate:"2021-10-13T19:21:55Z", GoVersion:"go1.17.1", Compiler:"gc", Platform:"linux/amd64"}
Server Version: version.Info{Major:"1", Minor:"23+", GitVersion:"v1.23.0-alpha.3.306+585c88eb439bc9", GitCommit:"585c88eb439bc9743aa375583a5faced16e37a38", GitTreeState:"clean", BuildDate:"2021-10-13T19:21:55Z", GoVersion:"go1.17.1", Compiler:"gc", Platform:"linux/amd64"}
The Service "kubernetes" is invalid: spec.clusterIPs: Invalid value: []string{"10.0.0.1"}: failed to allocate IP 10.0.0.1: provided IP is already allocated
NAME         TYPE        CLUSTER-IP   EXTERNAL-IP   PORT(S)   AGE
kubernetes   ClusterIP   10.0.0.1     <none>        443/TCP   42s
Recording: run_kubectl_version_tests
Running command: run_kubectl_version_tests

+++ Running case: test-cmd.run_kubectl_version_tests 
... skipping 100 lines ...
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_RESTMapper_evaluation_tests
+++ [1013 22:35:18] Creating namespace namespace-1634164518-27178
namespace/namespace-1634164518-27178 created
Context "test" modified.
+++ [1013 22:35:19] Testing RESTMapper
+++ [1013 22:35:20] "kubectl get unknownresourcetype" returns error as expected: error: the server doesn't have a resource type "unknownresourcetype"
+++ exit code: 0
NAME                              SHORTNAMES   APIVERSION                             NAMESPACED   KIND
bindings                                       v1                                     true         Binding
componentstatuses                 cs           v1                                     false        ComponentStatus
configmaps                        cm           v1                                     true         ConfigMap
endpoints                         ep           v1                                     true         Endpoints
... skipping 61 lines ...
namespace/namespace-1634164531-25532 created
Context "test" modified.
+++ [1013 22:35:32] Testing clusterroles
rbac.sh:29: Successful get clusterroles/cluster-admin {{.metadata.name}}: cluster-admin
rbac.sh:30: Successful get clusterrolebindings/cluster-admin {{.metadata.name}}: cluster-admin
Successful
message:Error from server (NotFound): clusterroles.rbac.authorization.k8s.io "pod-admin" not found
has:clusterroles.rbac.authorization.k8s.io "pod-admin" not found
clusterrole.rbac.authorization.k8s.io/pod-admin created (dry run)
clusterrole.rbac.authorization.k8s.io/pod-admin created (server dry run)
Successful
message:Error from server (NotFound): clusterroles.rbac.authorization.k8s.io "pod-admin" not found
has:clusterroles.rbac.authorization.k8s.io "pod-admin" not found
clusterrole.rbac.authorization.k8s.io/pod-admin created
rbac.sh:42: Successful get clusterrole/pod-admin {{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}: *:
Successful
message:warning: deleting cluster-scoped resources, not scoped to the provided namespace
clusterrole.rbac.authorization.k8s.io "pod-admin" deleted
... skipping 18 lines ...
clusterrole.rbac.authorization.k8s.io/url-reader created
rbac.sh:61: Successful get clusterrole/url-reader {{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}: get:
rbac.sh:62: Successful get clusterrole/url-reader {{range.rules}}{{range.nonResourceURLs}}{{.}}:{{end}}{{end}}: /logs/*:/healthz/*:
clusterrole.rbac.authorization.k8s.io/aggregation-reader created
rbac.sh:64: Successful get clusterrole/aggregation-reader {{.metadata.name}}: aggregation-reader
Successful
message:Error from server (NotFound): clusterrolebindings.rbac.authorization.k8s.io "super-admin" not found
has:clusterrolebindings.rbac.authorization.k8s.io "super-admin" not found
clusterrolebinding.rbac.authorization.k8s.io/super-admin created (dry run)
clusterrolebinding.rbac.authorization.k8s.io/super-admin created (server dry run)
Successful
message:Error from server (NotFound): clusterrolebindings.rbac.authorization.k8s.io "super-admin" not found
has:clusterrolebindings.rbac.authorization.k8s.io "super-admin" not found
clusterrolebinding.rbac.authorization.k8s.io/super-admin created
rbac.sh:77: Successful get clusterrolebinding/super-admin {{range.subjects}}{{.name}}:{{end}}: super-admin:
clusterrolebinding.rbac.authorization.k8s.io/super-admin subjects updated (dry run)
clusterrolebinding.rbac.authorization.k8s.io/super-admin subjects updated (server dry run)
rbac.sh:80: Successful get clusterrolebinding/super-admin {{range.subjects}}{{.name}}:{{end}}: super-admin:
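Each '(dry run)' / '(server dry run)' pair exercises both dry-run modes of the same create. The flags below are kubectl's real ones; the exact role granted in the script is an assumption:

    kubectl create clusterrolebinding super-admin --clusterrole=admin --user=super-admin --dry-run=client
    kubectl create clusterrolebinding super-admin --clusterrole=admin --user=super-admin --dry-run=server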
... skipping 64 lines ...
rbac.sh:102: Successful get clusterrolebinding/super-admin {{range.subjects}}{{.name}}:{{end}}: super-admin:foo:test-all-user:
rbac.sh:103: Successful get clusterrolebinding/super-group {{range.subjects}}{{.name}}:{{end}}: the-group:foo:test-all-user:
rbac.sh:104: Successful get clusterrolebinding/super-sa {{range.subjects}}{{.name}}:{{end}}: sa-name:foo:test-all-user:
rolebinding.rbac.authorization.k8s.io/admin created (dry run)
rolebinding.rbac.authorization.k8s.io/admin created (server dry run)
Successful
message:Error from server (NotFound): rolebindings.rbac.authorization.k8s.io "admin" not found
has: not found
rolebinding.rbac.authorization.k8s.io/admin created
rbac.sh:113: Successful get rolebinding/admin {{.roleRef.kind}}: ClusterRole
rbac.sh:114: Successful get rolebinding/admin {{range.subjects}}{{.name}}:{{end}}: default-admin:
rolebinding.rbac.authorization.k8s.io/admin subjects updated
rbac.sh:116: Successful get rolebinding/admin {{range.subjects}}{{.name}}:{{end}}: default-admin:foo:
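The rbac.sh:113 check confirms that a namespaced RoleBinding may point its roleRef at a ClusterRole; a plausible shape for the command under test (the cluster role name is an assumption):

    kubectl create rolebinding admin --clusterrole=admin --user=default-admin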
... skipping 152 lines ...
namespace/namespace-1634164543-30318 created
Context "test" modified.
+++ [1013 22:35:43] Testing role
role.rbac.authorization.k8s.io/pod-admin created (dry run)
role.rbac.authorization.k8s.io/pod-admin created (server dry run)
Successful
message:Error from server (NotFound): roles.rbac.authorization.k8s.io "pod-admin" not found
has: not found
role.rbac.authorization.k8s.io/pod-admin created
rbac.sh:159: Successful get role/pod-admin {{range.rules}}{{range.verbs}}{{.}}:{{end}}{{end}}: *:
rbac.sh:160: Successful get role/pod-admin {{range.rules}}{{range.resources}}{{.}}:{{end}}{{end}}: pods:
rbac.sh:161: Successful get role/pod-admin {{range.rules}}{{range.apiGroups}}{{.}}:{{end}}{{end}}: :
Successful
... skipping 440 lines ...
has:valid-pod
Successful
message:NAME        READY   STATUS    RESTARTS   AGE
valid-pod   0/1     Pending   0          0s
has:valid-pod
core.sh:194: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
error: resource(s) were provided, but no name was specified
core.sh:198: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
core.sh:202: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
error: setting 'all' parameter but found a non empty selector. 
core.sh:206: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
core.sh:210: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
pod "valid-pod" force deleted
core.sh:214: Successful get pods -l'name in (valid-pod)' {{range.items}}{{.metadata.name}}:{{end}}: 
core.sh:219: Successful get namespaces {{range.items}}{{ if eq .metadata.name \"test-kubectl-describe-pod\" }}found{{end}}{{end}}:: :
... skipping 30 lines ...
I1013 22:36:01.102981   61598 round_trippers.go:541] GET https://127.0.0.1:6443/apis/policy/v1/namespaces/test-kubectl-describe-pod/poddisruptionbudgets/test-pdb-2 200 OK in 2 milliseconds
I1013 22:36:01.105439   61598 round_trippers.go:541] GET https://127.0.0.1:6443/api/v1/namespaces/test-kubectl-describe-pod/events?fieldSelector=involvedObject.name%3Dtest-pdb-2%2CinvolvedObject.namespace%3Dtest-kubectl-describe-pod%2CinvolvedObject.kind%3DPodDisruptionBudget%2CinvolvedObject.uid%3D8e9574fe-170d-4ad6-85ca-5af21ee5aac6&limit=500 200 OK in 2 milliseconds
poddisruptionbudget.policy/test-pdb-3 created
core.sh:271: Successful get pdb/test-pdb-3 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 2
poddisruptionbudget.policy/test-pdb-4 created
core.sh:275: Successful get pdb/test-pdb-4 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 50%
error: min-available and max-unavailable cannot be both specified
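test-pdb-3 and test-pdb-4 cover the integer and percentage forms of maxUnavailable, and the error shows the two budget flags are mutually exclusive. Plausible invocations (the selector is an assumption):

    kubectl create pdb test-pdb-3 -n test-kubectl-describe-pod --selector=app=rails --max-unavailable=2
    kubectl create pdb test-pdb-4 -n test-kubectl-describe-pod --selector=app=rails --max-unavailable=50%
    kubectl create pdb bad-pdb --selector=app=rails --min-available=1 --max-unavailable=1   # rejected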
core.sh:281: Successful get pods --namespace=test-kubectl-describe-pod {{range.items}}{{.metadata.name}}:{{end}}: 
pod/env-test-pod created
matched TEST_CMD_1
matched <set to the key 'key-1' in secret 'test-secret'>
matched TEST_CMD_2
matched <set to the key 'key-2' of config map 'test-configmap'>
... skipping 240 lines ...
core.sh:542: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:3.6:
Successful
message:kubectl-create kubectl-patch
has:kubectl-patch
pod/valid-pod patched
core.sh:562: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: nginx:
+++ [1013 22:36:22] "kubectl patch with resourceVersion 619" returns error as expected: Error from server (Conflict): Operation cannot be fulfilled on pods "valid-pod": the object has been modified; please apply your changes to the latest version and try again
pod "valid-pod" deleted
pod/valid-pod replaced
core.sh:586: Successful get pod valid-pod {{(index .spec.containers 0).name}}: replaced-k8s-serve-hostname
Successful
message:kubectl-replace
has:kubectl-replace
Successful
message:error: --grace-period must have --force specified
has:\-\-grace-period must have \-\-force specified
Successful
message:error: --timeout must have --force specified
has:\-\-timeout must have \-\-force specified
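Both validations guard kubectl replace's forced delete-and-recreate path (an inference from the surrounding replace tests): --grace-period and --timeout shape the deletion, so each requires --force. The accepted forms (the filename is a placeholder):

    kubectl replace --force --grace-period=0 -f pod.yaml
    kubectl replace --force --timeout=60s -f pod.yaml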
node/node-v1-test created
W1013 22:36:23.754073   56856 actual_state_of_world.go:534] Failed to update statusUpdateNeeded field in actual state of world: Failed to set statusUpdateNeeded to needed true, because nodeName="node-v1-test" does not exist
core.sh:614: Successful get node node-v1-test {{range.items}}{{if .metadata.annotations.a}}found{{end}}{{end}}:: :
node/node-v1-test replaced (server dry run)
node/node-v1-test replaced (dry run)
core.sh:639: Successful get node node-v1-test {{range.items}}{{if .metadata.annotations.a}}found{{end}}{{end}}:: :
node/node-v1-test replaced
core.sh:655: Successful get node node-v1-test {{.metadata.annotations.a}}: b
... skipping 29 lines ...
spec:
  containers:
  - image: k8s.gcr.io/pause:3.6
    name: kubernetes-pause
has:localonlyvalue
core.sh:691: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
error: 'name' already has a value (valid-pod), and --overwrite is false
core.sh:695: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
core.sh:699: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
pod/valid-pod labeled
core.sh:703: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod-super-sayan
core.sh:707: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
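Re-labeling an existing key is refused until --overwrite is passed, which is how the name label advances to valid-pod-super-sayan above:

    kubectl label pod valid-pod name=valid-pod-super-sayan --overwrite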
warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
... skipping 83 lines ...
+++ Running case: test-cmd.run_kubectl_create_error_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_kubectl_create_error_tests
+++ [1013 22:36:36] Creating namespace namespace-1634164596-5437
namespace/namespace-1634164596-5437 created
Context "test" modified.
+++ [1013 22:36:36] Testing kubectl create with error
Error: must specify one of -f and -k

Create a resource from a file or from stdin.

 JSON and YAML formats are accepted.

Examples:
... skipping 43 lines ...

Usage:
  kubectl create -f FILENAME [options]

Use "kubectl <command> --help" for more information about a given command.
Use "kubectl options" for a list of global command-line options (applies to all commands).
+++ [1013 22:36:36] "kubectl create with empty string list returns error as expected: error: error validating "hack/testdata/invalid-rc-with-empty-args.yaml": error validating data: ValidationError(ReplicationController.spec.template.spec.containers[0].args): unknown object type "nil" in ReplicationController.spec.template.spec.containers[0].args[0]; if you choose to ignore these errors, turn validation off with --validate=false
+++ exit code: 0
Recording: run_kubectl_apply_tests
Running command: run_kubectl_apply_tests

+++ Running case: test-cmd.run_kubectl_apply_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
... skipping 29 lines ...
I1013 22:36:40.717972   56856 event.go:294] "Event occurred" object="namespace-1634164596-1505/test-deployment-retainkeys-8695b756f8" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: test-deployment-retainkeys-8695b756f8-2z5tv"
deployment.apps "test-deployment-retainkeys" deleted
apply.sh:88: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
pod/selector-test-pod created
apply.sh:92: Successful get pods selector-test-pod {{.metadata.labels.name}}: selector-test-pod
Successful
message:Error from server (NotFound): pods "selector-test-pod-dont-apply" not found
has:pods "selector-test-pod-dont-apply" not found
pod "selector-test-pod" deleted
apply.sh:101: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
pod/test-pod created (dry run)
pod/test-pod created (server dry run)
apply.sh:107: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
... skipping 27 lines ...
pod/b created
apply.sh:207: Successful get pods a {{.metadata.name}}: a
apply.sh:208: Successful get pods b -n nsb {{.metadata.name}}: b
pod "a" deleted
pod "b" deleted
Successful
message:error: all resources selected for prune without explicitly passing --all. To prune all resources, pass the --all flag. If you did not mean to prune all resources, specify a label selector
has:all resources selected for prune without explicitly passing --all
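--prune refuses an implicit everything-selector; the two accepted spellings are an explicit label selector or an explicit --all:

    kubectl apply --prune -l app=demo -f manifests/
    kubectl apply --prune --all -f manifests/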
pod/a created
pod/b created
service/prune-svc created
I1013 22:36:50.566651   56856 horizontal.go:361] Horizontal Pod Autoscaler frontend has been deleted in namespace-1634164593-10321
apply.sh:220: Successful get pods a {{.metadata.name}}: a
... skipping 35 lines ...
apply.sh:261: Successful get pods b -n nsb {{.metadata.name}}: b
pod/b unchanged
pod/a pruned
apply.sh:265: Successful get pods -n nsb {{range.items}}{{.metadata.name}}:{{end}}: b:
namespace "nsb" deleted
Successful
message:error: the namespace from the provided object "nsb" does not match the namespace "foo". You must pass '--namespace=nsb' to perform this operation.
has:the namespace from the provided object "nsb" does not match the namespace "foo".
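The object's own metadata.namespace must agree with the --namespace flag; aligning the two resolves the error (the filename is hypothetical):

    kubectl apply --namespace=nsb -f pod-in-nsb.yaml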
apply.sh:276: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: 
service/a created
apply.sh:280: Successful get services a {{.metadata.name}}: a
Successful
message:The Service "a" is invalid: spec.clusterIPs[0]: Invalid value: []string{"10.0.0.12"}: may not change once set
... skipping 26 lines ...
apply.sh:302: Successful get deployment test-the-deployment {{.metadata.name}}: test-the-deployment
apply.sh:303: Successful get service test-the-service {{.metadata.name}}: test-the-service
configmap "test-the-map" deleted
service "test-the-service" deleted
deployment.apps "test-the-deployment" deleted
Successful
message:Error from server (NotFound): namespaces "multi-resource-ns" not found
has:namespaces "multi-resource-ns" not found
apply.sh:311: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
Successful
message:namespace/multi-resource-ns created
Error from server (NotFound): error when creating "hack/testdata/multi-resource-1.yaml": namespaces "multi-resource-ns" not found
has:namespaces "multi-resource-ns" not found
Successful
message:Error from server (NotFound): pods "test-pod" not found
has:pods "test-pod" not found
pod/test-pod created
namespace/multi-resource-ns unchanged
apply.sh:319: Successful get pods test-pod -n multi-resource-ns {{.metadata.name}}: test-pod
(Bpod "test-pod" deleted
namespace "multi-resource-ns" deleted
apply.sh:325: Successful get configmaps --field-selector=metadata.name=foo {{range.items}}{{.metadata.name}}:{{end}}: 
Successful
message:configmap/foo created
error: unable to recognize "hack/testdata/multi-resource-2.yaml": no matches for kind "Bogus" in version "example.com/v1"
has:no matches for kind "Bogus" in version "example.com/v1"
apply.sh:331: Successful get configmaps foo {{.metadata.name}}: foo
(Bconfigmap "foo" deleted
apply.sh:337: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
Successful
message:pod/pod-a created
... skipping 5 lines ...
(Bpod "pod-a" deleted
pod "pod-c" deleted
apply.sh:345: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(Bapply.sh:349: Successful get crds {{range.items}}{{.metadata.name}}:{{end}}: 
Successful
message:customresourcedefinition.apiextensions.k8s.io/widgets.example.com created
error: unable to recognize "hack/testdata/multi-resource-4.yaml": no matches for kind "Widget" in version "example.com/v1"
has:no matches for kind "Widget" in version "example.com/v1"
I1013 22:37:34.426276   56856 namespace_controller.go:185] Namespace has been deleted multi-resource-ns
Successful
message:Error from server (NotFound): widgets.example.com "foo" not found
has:widgets.example.com "foo" not found
apply.sh:355: Successful get crds widgets.example.com {{.metadata.name}}: widgets.example.com
I1013 22:37:35.213153   53334 controller.go:611] quota admission added evaluator for: widgets.example.com
widget.example.com/foo created
customresourcedefinition.apiextensions.k8s.io/widgets.example.com unchanged
apply.sh:358: Successful get widget foo {{.metadata.name}}: foo
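This failed-then-succeeded pair is the usual CRD bootstrapping race: the first apply creates the CRD, but discovery has not yet learned kind Widget, so the custom resource in the same file is rejected; once the CRD is established a re-apply succeeds, which is exactly what the log shows. A sketch of the sequence:

    kubectl apply -f hack/testdata/multi-resource-4.yaml    # CRD created, Widget not yet recognized
    kubectl wait --for=condition=Established crd/widgets.example.com
    kubectl apply -f hack/testdata/multi-resource-4.yaml    # widget.example.com/foo created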
... skipping 31 lines ...
message:900
has:900
pod "test-pod" deleted
apply.sh:414: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
+++ [1013 22:37:38] Testing upgrade kubectl client-side apply to server-side apply
pod/test-pod created
error: Apply failed with 1 conflict: conflict with "kubectl-client-side-apply" using v1: .metadata.labels.name
Please review the fields above--they currently have other managers. Here
are the ways you can resolve this warning:
* If you intend to manage all of these fields, please re-run the apply
  command with the `--force-conflicts` flag.
* If you do not intend to manage all of the fields, please edit your
  manifest to remove references to the fields that should keep their
... skipping 74 lines ...
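Server-side apply will not silently take fields owned by another manager (here kubectl-client-side-apply); per the message above, ownership can be forced:

    kubectl apply --server-side --force-conflicts -f pod.yaml   # filename is a placeholder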
+++ [1013 22:37:43] Testing kubectl run
pod/nginx-extensions created (dry run)
pod/nginx-extensions created (server dry run)
run.sh:32: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
run.sh:35: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
W1013 22:37:43.808822   53334 cacher.go:150] Terminating all watchers from cacher *unstructured.Unstructured
E1013 22:37:43.810892   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
pod/nginx-extensions created
run.sh:39: Successful get pod {{range.items}}{{.metadata.name}}:{{end}}: nginx-extensions:
(Bpod "nginx-extensions" deleted
Successful
message:pod/test1 created
has:pod/test1 created
pod "test1" deleted
Successful
message:error: Invalid image name "InvalidImageName": invalid reference format
has:error: Invalid image name "InvalidImageName": invalid reference format
+++ exit code: 0
Recording: run_kubectl_create_filter_tests
Running command: run_kubectl_create_filter_tests

+++ Running case: test-cmd.run_kubectl_create_filter_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_kubectl_create_filter_tests
+++ [1013 22:37:44] Creating namespace namespace-1634164664-17878
namespace/namespace-1634164664-17878 created
Context "test" modified.
+++ [1013 22:37:44] Testing kubectl create filter
E1013 22:37:44.737624   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
create.sh:50: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
(Bpod/selector-test-pod created
create.sh:54: Successful get pods selector-test-pod {{.metadata.labels.name}}: selector-test-pod
Successful
message:Error from server (NotFound): pods "selector-test-pod-dont-apply" not found
has:pods "selector-test-pod-dont-apply" not found
pod "selector-test-pod" deleted
+++ exit code: 0
Recording: run_kubectl_apply_deployments_tests
Running command: run_kubectl_apply_deployments_tests

... skipping 13 lines ...
apps.sh:127: Successful get deployments my-depl {{.metadata.name}}: my-depl
apps.sh:129: Successful get deployments my-depl {{.spec.template.metadata.labels.l1}}: l1
apps.sh:130: Successful get deployments my-depl {{.spec.selector.matchLabels.l1}}: l1
apps.sh:131: Successful get deployments my-depl {{.metadata.labels.l1}}: l1
deployment.apps/my-depl configured
apps.sh:136: Successful get deployments my-depl {{.spec.template.metadata.labels.l1}}: l1
E1013 22:37:47.122503   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
apps.sh:137: Successful get deployments my-depl {{.spec.selector.matchLabels.l1}}: l1
apps.sh:138: Successful get deployments my-depl {{.metadata.labels.l1}}: <no value>
(Bdeployment.apps "my-depl" deleted
replicaset.apps "my-depl-84fb47b469" deleted
pod "my-depl-84fb47b469-q7kdw" deleted
apps.sh:144: Successful get deployments {{range.items}}{{.metadata.name}}:{{end}}: 
... skipping 3 lines ...
deployment.apps/nginx created
I1013 22:37:48.369032   56856 event.go:294] "Event occurred" object="namespace-1634164665-27409/nginx" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-9bb9c4878 to 3"
I1013 22:37:48.447557   56856 event.go:294] "Event occurred" object="namespace-1634164665-27409/nginx-9bb9c4878" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-9bb9c4878-p74rz"
I1013 22:37:48.478349   56856 event.go:294] "Event occurred" object="namespace-1634164665-27409/nginx-9bb9c4878" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-9bb9c4878-mk5d6"
I1013 22:37:48.478392   56856 event.go:294] "Event occurred" object="namespace-1634164665-27409/nginx-9bb9c4878" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-9bb9c4878-bkdpx"
apps.sh:154: Successful get deployment nginx {{.metadata.name}}: nginx
E1013 22:37:52.726943   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
Successful
message:Error from server (Conflict): error when applying patch:
{"metadata":{"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"apps/v1\",\"kind\":\"Deployment\",\"metadata\":{\"annotations\":{},\"labels\":{\"name\":\"nginx\"},\"name\":\"nginx\",\"namespace\":\"namespace-1634164665-27409\",\"resourceVersion\":\"99\"},\"spec\":{\"replicas\":3,\"selector\":{\"matchLabels\":{\"name\":\"nginx2\"}},\"template\":{\"metadata\":{\"labels\":{\"name\":\"nginx2\"}},\"spec\":{\"containers\":[{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"nginx\",\"ports\":[{\"containerPort\":80}]}]}}}}\n"},"resourceVersion":"99"},"spec":{"selector":{"matchLabels":{"name":"nginx2"}},"template":{"metadata":{"labels":{"name":"nginx2"}}}}}
to:
Resource: "apps/v1, Resource=deployments", GroupVersionKind: "apps/v1, Kind=Deployment"
Name: "nginx", Namespace: "namespace-1634164665-27409"
for: "hack/testdata/deployment-label-change2.yaml": Operation cannot be fulfilled on deployments.apps "nginx": the object has been modified; please apply your changes to the latest version and try again
has:Error from server (Conflict)
deployment.apps/nginx configured
I1013 22:37:57.150249   56856 event.go:294] "Event occurred" object="namespace-1634164665-27409/nginx" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-6dd6cfdb57 to 3"
I1013 22:37:57.232426   56856 event.go:294] "Event occurred" object="namespace-1634164665-27409/nginx-6dd6cfdb57" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-6dd6cfdb57-xfmrj"
I1013 22:37:57.268678   56856 event.go:294] "Event occurred" object="namespace-1634164665-27409/nginx-6dd6cfdb57" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-6dd6cfdb57-wq2jj"
I1013 22:37:57.268717   56856 event.go:294] "Event occurred" object="namespace-1634164665-27409/nginx-6dd6cfdb57" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-6dd6cfdb57-q68zc"
Successful
message:        "name": "nginx2"
          "name": "nginx2"
has:"name": "nginx2"
E1013 22:37:59.163302   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
Successful
message:The Deployment "nginx" is invalid: spec.template.metadata.labels: Invalid value: map[string]string{"name":"nginx3"}: `selector` does not match template `labels`
has:Invalid value
I1013 22:38:01.669668   56856 event.go:294] "Event occurred" object="namespace-1634164665-27409/nginx" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-6dd6cfdb57 to 3"
I1013 22:38:01.716267   56856 event.go:294] "Event occurred" object="namespace-1634164665-27409/nginx-6dd6cfdb57" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-6dd6cfdb57-ftpgf"
I1013 22:38:01.737074   56856 event.go:294] "Event occurred" object="namespace-1634164665-27409/nginx-6dd6cfdb57" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-6dd6cfdb57-9nwlk"
... skipping 299 lines ...
+++ [1013 22:38:05] Creating namespace namespace-1634164685-25572
namespace/namespace-1634164685-25572 created
Context "test" modified.
+++ [1013 22:38:06] Testing kubectl get
get.sh:29: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
Successful
message:Error from server (NotFound): pods "abc" not found
has:pods "abc" not found
get.sh:37: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
Successful
message:Error from server (NotFound): pods "abc" not found
has:pods "abc" not found
get.sh:45: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
Successful
message:{
    "apiVersion": "v1",
    "items": [],
... skipping 23 lines ...
has not:No resources found
Successful
message:NAME
has not:No resources found
get.sh:73: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
Successful
message:error: the server doesn't have a resource type "foobar"
has not:No resources found
Successful
message:No resources found in namespace-1634164685-25572 namespace.
has:No resources found
Successful
message:
has not:No resources found
Successful
message:No resources found in namespace-1634164685-25572 namespace.
has:No resources found
get.sh:93: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
Successful
message:Error from server (NotFound): pods "abc" not found
has:pods "abc" not found
Successful
message:Error from server (NotFound): pods "abc" not found
has not:List
Successful
message:I1013 22:38:09.090665   68888 loader.go:372] Config loaded from file:  /tmp/tmp.yBKjjCvlKb/.kube/config
I1013 22:38:09.097345   68888 round_trippers.go:541] GET https://127.0.0.1:6443/version?timeout=32s 200 OK in 5 milliseconds
I1013 22:38:09.142089   68888 round_trippers.go:541] GET https://127.0.0.1:6443/api/v1/namespaces/default/pods 200 OK in 2 milliseconds
I1013 22:38:09.144216   68888 round_trippers.go:541] GET https://127.0.0.1:6443/api/v1/namespaces/default/replicationcontrollers 200 OK in 1 milliseconds
... skipping 598 lines ...
}
get.sh:158: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
(B<no value>Successful
message:valid-pod:
has:valid-pod:
Successful
message:error: error executing jsonpath "{.missing}": Error executing template: missing is not found. Printing more information for debugging the template:
	template was:
		{.missing}
	object given to jsonpath engine was:
		map[string]interface {}{"apiVersion":"v1", "kind":"Pod", "metadata":map[string]interface {}{"creationTimestamp":"2021-10-13T22:38:17Z", "labels":map[string]interface {}{"name":"valid-pod"}, "managedFields":[]interface {}{map[string]interface {}{"apiVersion":"v1", "fieldsType":"FieldsV1", "fieldsV1":map[string]interface {}{"f:metadata":map[string]interface {}{"f:labels":map[string]interface {}{".":map[string]interface {}{}, "f:name":map[string]interface {}{}}}, "f:spec":map[string]interface {}{"f:containers":map[string]interface {}{"k:{\"name\":\"kubernetes-serve-hostname\"}":map[string]interface {}{".":map[string]interface {}{}, "f:image":map[string]interface {}{}, "f:imagePullPolicy":map[string]interface {}{}, "f:name":map[string]interface {}{}, "f:resources":map[string]interface {}{".":map[string]interface {}{}, "f:limits":map[string]interface {}{".":map[string]interface {}{}, "f:cpu":map[string]interface {}{}, "f:memory":map[string]interface {}{}}, "f:requests":map[string]interface {}{".":map[string]interface {}{}, "f:cpu":map[string]interface {}{}, "f:memory":map[string]interface {}{}}}, "f:terminationMessagePath":map[string]interface {}{}, "f:terminationMessagePolicy":map[string]interface {}{}}}, "f:dnsPolicy":map[string]interface {}{}, "f:enableServiceLinks":map[string]interface {}{}, "f:restartPolicy":map[string]interface {}{}, "f:schedulerName":map[string]interface {}{}, "f:securityContext":map[string]interface {}{}, "f:terminationGracePeriodSeconds":map[string]interface {}{}}}, "manager":"kubectl-create", "operation":"Update", "time":"2021-10-13T22:38:17Z"}}, "name":"valid-pod", "namespace":"namespace-1634164696-9883", "resourceVersion":"1070", "uid":"a682306f-be8a-4e12-98f2-4cfac5c62078"}, "spec":map[string]interface {}{"containers":[]interface {}{map[string]interface {}{"image":"k8s.gcr.io/serve_hostname", "imagePullPolicy":"Always", "name":"kubernetes-serve-hostname", "resources":map[string]interface {}{"limits":map[string]interface {}{"cpu":"1", "memory":"512Mi"}, "requests":map[string]interface {}{"cpu":"1", "memory":"512Mi"}}, "terminationMessagePath":"/dev/termination-log", "terminationMessagePolicy":"File"}}, "dnsPolicy":"ClusterFirst", "enableServiceLinks":true, "preemptionPolicy":"PreemptLowerPriority", "priority":0, "restartPolicy":"Always", "schedulerName":"default-scheduler", "securityContext":map[string]interface {}{}, "terminationGracePeriodSeconds":30}, "status":map[string]interface {}{"phase":"Pending", "qosClass":"Guaranteed"}}
has:missing is not found
error: error executing template "{{.missing}}": template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing"
Successful
message:Error executing template: template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing". Printing more information for debugging the template:
	template was:
		{{.missing}}
	raw data was:
		{"apiVersion":"v1","kind":"Pod","metadata":{"creationTimestamp":"2021-10-13T22:38:17Z","labels":{"name":"valid-pod"},"managedFields":[{"apiVersion":"v1","fieldsType":"FieldsV1","fieldsV1":{"f:metadata":{"f:labels":{".":{},"f:name":{}}},"f:spec":{"f:containers":{"k:{\"name\":\"kubernetes-serve-hostname\"}":{".":{},"f:image":{},"f:imagePullPolicy":{},"f:name":{},"f:resources":{".":{},"f:limits":{".":{},"f:cpu":{},"f:memory":{}},"f:requests":{".":{},"f:cpu":{},"f:memory":{}}},"f:terminationMessagePath":{},"f:terminationMessagePolicy":{}}},"f:dnsPolicy":{},"f:enableServiceLinks":{},"f:restartPolicy":{},"f:schedulerName":{},"f:securityContext":{},"f:terminationGracePeriodSeconds":{}}},"manager":"kubectl-create","operation":"Update","time":"2021-10-13T22:38:17Z"}],"name":"valid-pod","namespace":"namespace-1634164696-9883","resourceVersion":"1070","uid":"a682306f-be8a-4e12-98f2-4cfac5c62078"},"spec":{"containers":[{"image":"k8s.gcr.io/serve_hostname","imagePullPolicy":"Always","name":"kubernetes-serve-hostname","resources":{"limits":{"cpu":"1","memory":"512Mi"},"requests":{"cpu":"1","memory":"512Mi"}},"terminationMessagePath":"/dev/termination-log","terminationMessagePolicy":"File"}],"dnsPolicy":"ClusterFirst","enableServiceLinks":true,"preemptionPolicy":"PreemptLowerPriority","priority":0,"restartPolicy":"Always","schedulerName":"default-scheduler","securityContext":{},"terminationGracePeriodSeconds":30},"status":{"phase":"Pending","qosClass":"Guaranteed"}}
	object given to template engine was:
		map[apiVersion:v1 kind:Pod metadata:map[creationTimestamp:2021-10-13T22:38:17Z labels:map[name:valid-pod] managedFields:[map[apiVersion:v1 fieldsType:FieldsV1 fieldsV1:map[f:metadata:map[f:labels:map[.:map[] f:name:map[]]] f:spec:map[f:containers:map[k:{"name":"kubernetes-serve-hostname"}:map[.:map[] f:image:map[] f:imagePullPolicy:map[] f:name:map[] f:resources:map[.:map[] f:limits:map[.:map[] f:cpu:map[] f:memory:map[]] f:requests:map[.:map[] f:cpu:map[] f:memory:map[]]] f:terminationMessagePath:map[] f:terminationMessagePolicy:map[]]] f:dnsPolicy:map[] f:enableServiceLinks:map[] f:restartPolicy:map[] f:schedulerName:map[] f:securityContext:map[] f:terminationGracePeriodSeconds:map[]]] manager:kubectl-create operation:Update time:2021-10-13T22:38:17Z]] name:valid-pod namespace:namespace-1634164696-9883 resourceVersion:1070 uid:a682306f-be8a-4e12-98f2-4cfac5c62078] spec:map[containers:[map[image:k8s.gcr.io/serve_hostname imagePullPolicy:Always name:kubernetes-serve-hostname resources:map[limits:map[cpu:1 memory:512Mi] requests:map[cpu:1 memory:512Mi]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:File]] dnsPolicy:ClusterFirst enableServiceLinks:true preemptionPolicy:PreemptLowerPriority priority:0 restartPolicy:Always schedulerName:default-scheduler securityContext:map[] terminationGracePeriodSeconds:30] status:map[phase:Pending qosClass:Guaranteed]]
... skipping 3 lines ...
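The same missing key fails differently per output engine: jsonpath reports 'missing is not found' while the go-template engine reports 'map has no entry for key'. The probes presumably look like:

    kubectl get pod valid-pod -o jsonpath='{.missing}'
    kubectl get pod valid-pod -o go-template='{{.missing}}'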
valid-pod   0/1     Pending   0          0s
has:STATUS
Successful
message:NAME        READY   STATUS    RESTARTS   AGE
valid-pod   0/1     Pending   0          0s
has:valid-pod
E1013 22:38:19.779912   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
Successful
message:pod/valid-pod
has not:STATUS
Successful
message:pod/valid-pod
has:pod/valid-pod
... skipping 69 lines ...
  terminationGracePeriodSeconds: 30
status:
  phase: Pending
  qosClass: Guaranteed
has:name: valid-pod
Successful
message:Error from server (NotFound): pods "invalid-pod" not found
has:"invalid-pod" not found
pod "valid-pod" deleted
get.sh:196: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
pod/redis-master created
pod/valid-pod created
Successful
... skipping 36 lines ...
+++ [1013 22:38:26] Creating namespace namespace-1634164706-13409
namespace/namespace-1634164706-13409 created
Context "test" modified.
+++ [1013 22:38:26] Testing kubectl exec POD COMMAND
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (NotFound): pods "abc" not found
has:pods "abc" not found
pod/test-pod created
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod test-pod does not have a host assigned
has not:pods "test-pod" not found
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod test-pod does not have a host assigned
has not:pod or type/name must be specified
pod "test-pod" deleted
+++ exit code: 0
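The deprecation notice is about argument parsing, not the feature: the command should be separated from the pod name with --, e.g.:

    kubectl exec test-pod -- date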
Recording: run_kubectl_exec_resource_name_tests
Running command: run_kubectl_exec_resource_name_tests

... skipping 3 lines ...
+++ [1013 22:38:27] Creating namespace namespace-1634164707-5347
namespace/namespace-1634164707-5347 created
Context "test" modified.
+++ [1013 22:38:27] Testing kubectl exec TYPE/NAME COMMAND
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
error: the server doesn't have a resource type "foo"
has:error:
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (NotFound): deployments.apps "bar" not found
has:"bar" not found
pod/test-pod created
replicaset.apps/frontend created
I1013 22:38:29.329137   56856 event.go:294] "Event occurred" object="namespace-1634164707-5347/frontend" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-bhf7m"
I1013 22:38:29.343334   56856 event.go:294] "Event occurred" object="namespace-1634164707-5347/frontend" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-tgq6p"
I1013 22:38:29.343374   56856 event.go:294] "Event occurred" object="namespace-1634164707-5347/frontend" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-dfdcd"
configmap/test-set-env-config created
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
error: cannot attach to *v1.ConfigMap: selector for *v1.ConfigMap not implemented
has:not implemented
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod test-pod does not have a host assigned
has not:not found
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod test-pod does not have a host assigned
has not:pod, type/name or --filename must be specified
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod frontend-bhf7m does not have a host assigned
has not:not found
Successful
message:kubectl exec [POD] [COMMAND] is DEPRECATED and will be removed in a future version. Use kubectl exec [POD] -- [COMMAND] instead.
Error from server (BadRequest): pod frontend-bhf7m does not have a host assigned
has not:pod, type/name or --filename must be specified
pod "test-pod" deleted
replicaset.apps "frontend" deleted
configmap "test-set-env-config" deleted
+++ exit code: 0
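For reference, kubectl exec also resolves TYPE/NAME arguments by picking a pod behind the resource, which is what this case exercises; resources without a pod selector (such as a ConfigMap) are rejected. A minimal sketch, names hypothetical:

  # picks a pod matched by the deployment's selector
  kubectl exec deployment/frontend -- date
  # fails: a ConfigMap has no selector, so there is no pod to attach to
  kubectl exec configmap/test-set-env-config -- date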
Recording: run_create_secret_tests
Running command: run_create_secret_tests

+++ Running case: test-cmd.run_create_secret_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_create_secret_tests
Successful
message:Error from server (NotFound): secrets "mysecret" not found
has:secrets "mysecret" not found
Successful
message:user-specified
has:user-specified
Successful
message:Error from server (NotFound): secrets "mysecret" not found
has:secrets "mysecret" not found
Successful
{"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"tester-update-cm","namespace":"default","uid":"f0a576be-32d8-4ce1-8172-e2707439b898","resourceVersion":"1152","creationTimestamp":"2021-10-13T22:38:30Z"}}
Successful
message:{"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"tester-update-cm","namespace":"default","uid":"f0a576be-32d8-4ce1-8172-e2707439b898","resourceVersion":"1153","creationTimestamp":"2021-10-13T22:38:30Z"},"data":{"key1":"config1"}}
has:uid
Successful
message:{"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"tester-update-cm","namespace":"default","uid":"f0a576be-32d8-4ce1-8172-e2707439b898","resourceVersion":"1153","creationTimestamp":"2021-10-13T22:38:30Z"},"data":{"key1":"config1"}}
has:config1
{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Success","details":{"name":"tester-update-cm","kind":"configmaps","uid":"f0a576be-32d8-4ce1-8172-e2707439b898"}}
Successful
message:Error from server (NotFound): configmaps "tester-update-cm" not found
has:configmaps "tester-update-cm" not found
+++ exit code: 0
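A minimal sketch of the secret lifecycle the case above walks through (the key name and exact flags are hypothetical; the log only shows the resulting messages):

  kubectl get secret mysecret        # NotFound before creation
  kubectl create secret generic mysecret --from-literal=username=user-specified
  kubectl get secret mysecret -o jsonpath='{.data.username}' | base64 -d
  kubectl delete secret mysecret     # NotFound again afterwards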
Recording: run_kubectl_create_kustomization_directory_tests
Running command: run_kubectl_create_kustomization_directory_tests

+++ Running case: test-cmd.run_kubectl_create_kustomization_directory_tests 
... skipping 73 lines ...
      securityContext: {}
      terminationGracePeriodSeconds: 30
status: {}
has:apps/v1beta1
deployment.apps "nginx" deleted
Successful
message:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
Successful
message:nginx:
has:nginx:
+++ exit code: 0
Recording: run_kubectl_delete_allnamespaces_tests
... skipping 104 lines ...
has:Timeout
Successful
message:NAME        READY   STATUS    RESTARTS   AGE
valid-pod   0/1     Pending   0          2s
has:valid-pod
Successful
message:error: Invalid timeout value. Timeout must be a single integer in seconds, or an integer followed by a corresponding time unit (e.g. 1s | 2m | 3h)
has:Invalid timeout value
pod "valid-pod" deleted
+++ exit code: 0
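The "Invalid timeout value" message above is client-side validation of a timeout flag; a plausible reproduction (flag values hypothetical, assuming --request-timeout is the flag under test):

  kubectl get pod valid-pod --request-timeout=1m   # accepted: integer plus time unit
  kubectl get pod valid-pod --request-timeout=1A   # rejected: "A" is not a valid time unit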
Recording: run_crd_tests
Running command: run_crd_tests

... skipping 161 lines ...
Flag --record has been deprecated, --record will be removed in the future
foo.company.com/test patched
crd.sh:282: Successful get foos/test {{.patched}}: value2
Flag --record has been deprecated, --record will be removed in the future
foo.company.com/test patched
crd.sh:284: Successful get foos/test {{.patched}}: <no value>
+++ [1013 22:38:49] "kubectl patch --local" returns error as expected for CustomResource: error: strategic merge patch is not supported for company.com/v1, Kind=Foo locally, try --type merge
{
    "apiVersion": "company.com/v1",
    "kind": "Foo",
    "metadata": {
        "annotations": {
            "kubernetes.io/change-cause": "kubectl patch foos/test --server=https://127.0.0.1:6443 --insecure-skip-tls-verify=true --match-server-version=true --patch={\"patched\":null} --type=merge --record=true"
... skipping 312 lines ...
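For reference, the patch error noted above is expected behavior for custom resources: they have no strategic-merge schema, so an explicit patch type is required. A minimal sketch matching the recorded change-cause:

  # strategic merge (the default) is rejected for company.com/v1, Kind=Foo
  kubectl patch foos/test -p '{"patched":"value2"}' --type merge
  # clearing the field again, as the recorded --record=true invocation shows
  kubectl patch foos/test -p '{"patched":null}' --type merge --record=true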
crd.sh:505: Successful get bars {{range.items}}{{.metadata.name}}:{{end}}: 
namespace/non-native-resources created
bar.company.com/test created
crd.sh:510: Successful get bars {{len .items}}: 1
(Bnamespace "non-native-resources" deleted
crd.sh:513: Successful get bars {{len .items}}: 0
Error from server (NotFound): namespaces "non-native-resources" not found
customresourcedefinition.apiextensions.k8s.io "foos.company.com" deleted
customresourcedefinition.apiextensions.k8s.io "bars.company.com" deleted
customresourcedefinition.apiextensions.k8s.io "resources.mygroup.example.com" deleted
customresourcedefinition.apiextensions.k8s.io "validfoos.company.com" deleted
+++ exit code: 0
+++ [1013 22:39:07] Testing recursive resources
+++ [1013 22:39:07] Creating namespace namespace-1634164747-11025
namespace/namespace-1634164747-11025 created
Context "test" modified.
generic-resources.sh:202: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
W1013 22:39:08.174662   53334 cacher.go:150] Terminating all watchers from cacher *unstructured.Unstructured
E1013 22:39:08.176429   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:206: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
Successful
message:pod/busybox0 created
pod/busybox1 created
error: error validating "hack/testdata/recursive/pod/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
has:error validating data: kind not set
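The validation failure above comes from the intentionally broken fixture, whose manifest spells "ind" where "kind" belongs. A minimal sketch of the recursive create and the escape hatch the message mentions:

  # walks hack/testdata/recursive/pod and its subdirectories
  kubectl create -f hack/testdata/recursive/pod --recursive
  # skips client-side schema validation; the broken file may still fail to decode
  kubectl create -f hack/testdata/recursive/pod --recursive --validate=false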
W1013 22:39:08.297163   53334 cacher.go:150] Terminating all watchers from cacher *unstructured.Unstructured
E1013 22:39:08.298767   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:211: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
W1013 22:39:08.424886   53334 cacher.go:150] Terminating all watchers from cacher *unstructured.Unstructured
E1013 22:39:08.426537   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
W1013 22:39:08.564885   53334 cacher.go:150] Terminating all watchers from cacher *unstructured.Unstructured
E1013 22:39:08.566497   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:220: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: busybox:busybox:
Successful
message:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:227: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
E1013 22:39:09.079586   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:231: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
Successful
message:pod/busybox0 replaced
pod/busybox1 replaced
error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
has:error validating data: kind not set
generic-resources.sh:236: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
Successful
message:Name:         busybox0
Namespace:    namespace-1634164747-11025
Priority:     0
Node:         <none>
... skipping 159 lines ...
has:Object 'Kind' is missing
generic-resources.sh:246: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
generic-resources.sh:250: Successful get pods {{range.items}}{{.metadata.annotations.annotatekey}}:{{end}}: annotatevalue:annotatevalue:
Successful
message:pod/busybox0 annotated
pod/busybox1 annotated
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
E1013 22:39:09.882058   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:255: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
E1013 22:39:10.134351   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:259: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
Successful
message:Warning: resource pods/busybox0 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
pod/busybox0 configured
Warning: resource pods/busybox1 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
pod/busybox1 configured
error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
has:error validating data: kind not set
generic-resources.sh:264: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
Successful
message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:busybox0:busybox1:
Successful
message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:273: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
pod/busybox0 labeled
pod/busybox1 labeled
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
generic-resources.sh:278: Successful get pods {{range.items}}{{.metadata.labels.mylabel}}:{{end}}: myvalue:myvalue:
Successful
message:pod/busybox0 labeled
pod/busybox1 labeled
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:283: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
pod/busybox0 patched
pod/busybox1 patched
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
generic-resources.sh:288: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: prom/busybox:prom/busybox:
Successful
message:pod/busybox0 patched
pod/busybox1 patched
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:293: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
generic-resources.sh:297: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
Successful
message:warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
pod "busybox0" force deleted
pod "busybox1" force deleted
error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
has:Object 'Kind' is missing
generic-resources.sh:302: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
I1013 22:39:11.749265   56856 shared_informer.go:240] Waiting for caches to sync for resource quota
I1013 22:39:11.749329   56856 shared_informer.go:247] Caches are synced for resource quota 
E1013 22:39:11.800127   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1013 22:39:11.820345   56856 namespace_controller.go:185] Namespace has been deleted non-native-resources
replicationcontroller/busybox0 created
replicationcontroller/busybox1 created
I1013 22:39:11.991965   56856 event.go:294] "Event occurred" object="namespace-1634164747-11025/busybox0" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox0-p4g4z"
error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I1013 22:39:12.010257   56856 event.go:294] "Event occurred" object="namespace-1634164747-11025/busybox1" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox1-ghmjs"
I1013 22:39:12.024296   56856 shared_informer.go:240] Waiting for caches to sync for garbage collector
I1013 22:39:12.024374   56856 shared_informer.go:247] Caches are synced for garbage collector 
generic-resources.sh:306: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
generic-resources.sh:311: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
E1013 22:39:12.253283   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E1013 22:39:12.298730   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:312: Successful get rc busybox0 {{.spec.replicas}}: 1
generic-resources.sh:313: Successful get rc busybox1 {{.spec.replicas}}: 1
generic-resources.sh:318: Successful get hpa busybox0 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 80
generic-resources.sh:319: Successful get hpa busybox1 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 80
Successful
message:horizontalpodautoscaler.autoscaling/busybox0 autoscaled
horizontalpodautoscaler.autoscaling/busybox1 autoscaled
error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
has:Object 'Kind' is missing
horizontalpodautoscaler.autoscaling "busybox0" deleted
horizontalpodautoscaler.autoscaling "busybox1" deleted
generic-resources.sh:327: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
generic-resources.sh:328: Successful get rc busybox0 {{.spec.replicas}}: 1
generic-resources.sh:329: Successful get rc busybox1 {{.spec.replicas}}: 1
generic-resources.sh:333: Successful get service busybox0 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80
generic-resources.sh:334: Successful get service busybox1 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80
Successful
message:service/busybox0 exposed
service/busybox1 exposed
error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
has:Object 'Kind' is missing
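The "<no value> 80" service checks above match an expose that sets only a port (the port-name template prints <no value> when the name is unset); a minimal sketch:

  # creates service/busybox0 with an unnamed port 80 selecting the RC's pods
  kubectl expose rc busybox0 --port=80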
generic-resources.sh:340: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
generic-resources.sh:341: Successful get rc busybox0 {{.spec.replicas}}: 1
generic-resources.sh:342: Successful get rc busybox1 {{.spec.replicas}}: 1
I1013 22:39:14.173080   56856 event.go:294] "Event occurred" object="namespace-1634164747-11025/busybox0" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox0-sjtcf"
I1013 22:39:14.236391   56856 event.go:294] "Event occurred" object="namespace-1634164747-11025/busybox1" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox1-gqvqm"
generic-resources.sh:346: Successful get rc busybox0 {{.spec.replicas}}: 2
generic-resources.sh:347: Successful get rc busybox1 {{.spec.replicas}}: 2
Successful
message:replicationcontroller/busybox0 scaled
replicationcontroller/busybox1 scaled
error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
has:Object 'Kind' is missing
generic-resources.sh:352: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
generic-resources.sh:356: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
Successful
message:warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
replicationcontroller "busybox0" force deleted
replicationcontroller "busybox1" force deleted
error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
has:Object 'Kind' is missing
generic-resources.sh:361: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
deployment.apps/nginx1-deployment created
deployment.apps/nginx0-deployment created
error: error validating "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I1013 22:39:15.301529   56856 event.go:294] "Event occurred" object="namespace-1634164747-11025/nginx1-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx1-deployment-758b5949b6 to 2"
I1013 22:39:15.319075   56856 event.go:294] "Event occurred" object="namespace-1634164747-11025/nginx1-deployment-758b5949b6" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx1-deployment-758b5949b6-v88h4"
I1013 22:39:15.319129   56856 event.go:294] "Event occurred" object="namespace-1634164747-11025/nginx0-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx0-deployment-75db9cdfd9 to 2"
I1013 22:39:15.334451   56856 event.go:294] "Event occurred" object="namespace-1634164747-11025/nginx0-deployment-75db9cdfd9" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx0-deployment-75db9cdfd9-9hztc"
I1013 22:39:15.337842   56856 event.go:294] "Event occurred" object="namespace-1634164747-11025/nginx1-deployment-758b5949b6" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx1-deployment-758b5949b6-wjhlf"
I1013 22:39:15.410164   56856 event.go:294] "Event occurred" object="namespace-1634164747-11025/nginx0-deployment-75db9cdfd9" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx0-deployment-75db9cdfd9-fnjr9"
generic-resources.sh:365: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx0-deployment:nginx1-deployment:
E1013 22:39:15.502127   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E1013 22:39:15.578768   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:366: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:k8s.gcr.io/nginx:1.7.9:
generic-resources.sh:370: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:k8s.gcr.io/nginx:1.7.9:
Successful
message:deployment.apps/nginx1-deployment skipped rollback (current template already matches revision 1)
deployment.apps/nginx0-deployment skipped rollback (current template already matches revision 1)
error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
has:Object 'Kind' is missing
deployment.apps/nginx1-deployment paused
deployment.apps/nginx0-deployment paused
generic-resources.sh:378: Successful get deployment {{range.items}}{{.spec.paused}}:{{end}}: true:true:
Successful
message:unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
... skipping 10 lines ...
1         <none>

deployment.apps/nginx0-deployment 
REVISION  CHANGE-CAUSE
1         <none>

error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
has:nginx0-deployment
Successful
message:deployment.apps/nginx1-deployment 
REVISION  CHANGE-CAUSE
1         <none>

deployment.apps/nginx0-deployment 
REVISION  CHANGE-CAUSE
1         <none>

error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
has:nginx1-deployment
Successful
message:deployment.apps/nginx1-deployment 
REVISION  CHANGE-CAUSE
1         <none>

deployment.apps/nginx0-deployment 
REVISION  CHANGE-CAUSE
1         <none>

error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
has:Object 'Kind' is missing
warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
deployment.apps "nginx1-deployment" force deleted
deployment.apps "nginx0-deployment" force deleted
error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"apps/v1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"selector":{"matchLabels":{"app":"nginx2"}},"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
E1013 22:39:17.182697   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:400: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
replicationcontroller/busybox0 created
replicationcontroller/busybox1 created
I1013 22:39:18.014825   56856 event.go:294] "Event occurred" object="namespace-1634164747-11025/busybox0" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox0-fl65c"
error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I1013 22:39:18.032457   56856 event.go:294] "Event occurred" object="namespace-1634164747-11025/busybox1" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: busybox1-v6q8j"
generic-resources.sh:404: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
Successful
message:no rollbacker has been implemented for "ReplicationController"
no rollbacker has been implemented for "ReplicationController"
unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
... skipping 2 lines ...
message:no rollbacker has been implemented for "ReplicationController"
no rollbacker has been implemented for "ReplicationController"
unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
has:Object 'Kind' is missing
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" pausing is not supported
error: replicationcontrollers "busybox1" pausing is not supported
has:Object 'Kind' is missing
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" pausing is not supported
error: replicationcontrollers "busybox1" pausing is not supported
has:replicationcontrollers "busybox0" pausing is not supported
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" pausing is not supported
error: replicationcontrollers "busybox1" pausing is not supported
has:replicationcontrollers "busybox1" pausing is not supported
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" resuming is not supported
error: replicationcontrollers "busybox1" resuming is not supported
has:Object 'Kind' is missing
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" resuming is not supported
error: replicationcontrollers "busybox1" resuming is not supported
has:replicationcontrollers "busybox0" resuming is not supported
Successful
message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
error: replicationcontrollers "busybox0" resuming is not supported
error: replicationcontrollers "busybox1" resuming is not supported
has:replicationcontrollers "busybox1" resuming is not supported
warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
replicationcontroller "busybox0" force deleted
replicationcontroller "busybox1" force deleted
error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
Recording: run_namespace_tests
Running command: run_namespace_tests

+++ Running case: test-cmd.run_namespace_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_namespace_tests
+++ [1013 22:39:19] Testing kubectl(v1:namespaces)
Successful
message:Error from server (NotFound): namespaces "my-namespace" not found
has: not found
namespace/my-namespace created (dry run)
namespace/my-namespace created (server dry run)
Successful
message:Error from server (NotFound): namespaces "my-namespace" not found
has: not found
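The two dry-run lines above exercise both dry-run modes; neither persists the namespace, which is why the follow-up get still reports NotFound. A minimal sketch:

  kubectl create namespace my-namespace --dry-run=client   # validated locally only
  kubectl create namespace my-namespace --dry-run=server   # submitted, but not persisted
  kubectl create namespace my-namespace                    # actually created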
namespace/my-namespace created
core.sh:1471: Successful get namespaces/my-namespace {{.metadata.name}}: my-namespace
query for namespaces had limit param
query for resourcequotas had limit param
query for limitranges had limit param
... skipping 123 lines ...
I1013 22:39:20.731010   74066 round_trippers.go:541] GET https://127.0.0.1:6443/api/v1/namespaces/namespace-1634164721-2905/resourcequotas?limit=500 200 OK in 1 milliseconds
I1013 22:39:20.732527   74066 round_trippers.go:541] GET https://127.0.0.1:6443/api/v1/namespaces/namespace-1634164721-2905/limitranges?limit=500 200 OK in 1 milliseconds
I1013 22:39:20.734412   74066 round_trippers.go:541] GET https://127.0.0.1:6443/api/v1/namespaces/namespace-1634164747-11025 200 OK in 1 milliseconds
I1013 22:39:20.735837   74066 round_trippers.go:541] GET https://127.0.0.1:6443/api/v1/namespaces/namespace-1634164747-11025/resourcequotas?limit=500 200 OK in 1 milliseconds
I1013 22:39:20.737156   74066 round_trippers.go:541] GET https://127.0.0.1:6443/api/v1/namespaces/namespace-1634164747-11025/limitranges?limit=500 200 OK in 1 milliseconds
(Bnamespace "my-namespace" deleted
E1013 22:39:22.220018   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E1013 22:39:22.803194   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E1013 22:39:24.972513   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
namespace/my-namespace condition met
Successful
message:Error from server (NotFound): namespaces "my-namespace" not found
has: not found
namespace/my-namespace created
core.sh:1482: Successful get namespaces/my-namespace {{.metadata.name}}: my-namespace
Successful
message:warning: deleting cluster-scoped resources, not scoped to the provided namespace
namespace "kube-node-lease" deleted
... skipping 31 lines ...
namespace "namespace-1634164715-22196" deleted
namespace "namespace-1634164715-29413" deleted
namespace "namespace-1634164717-17471" deleted
namespace "namespace-1634164719-18180" deleted
namespace "namespace-1634164721-2905" deleted
namespace "namespace-1634164747-11025" deleted
Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted
Error from server (Forbidden): namespaces "kube-public" is forbidden: this namespace may not be deleted
Error from server (Forbidden): namespaces "kube-system" is forbidden: this namespace may not be deleted
has:warning: deleting cluster-scoped resources
Successful
message:warning: deleting cluster-scoped resources, not scoped to the provided namespace
namespace "kube-node-lease" deleted
namespace "my-namespace" deleted
namespace "namespace-1634164515-15152" deleted
... skipping 29 lines ...
namespace "namespace-1634164715-22196" deleted
namespace "namespace-1634164715-29413" deleted
namespace "namespace-1634164717-17471" deleted
namespace "namespace-1634164719-18180" deleted
namespace "namespace-1634164721-2905" deleted
namespace "namespace-1634164747-11025" deleted
Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted
Error from server (Forbidden): namespaces "kube-public" is forbidden: this namespace may not be deleted
Error from server (Forbidden): namespaces "kube-system" is forbidden: this namespace may not be deleted
has:namespace "my-namespace" deleted
namespace/quotas created
I1013 22:39:27.543328   56856 horizontal.go:361] Horizontal Pod Autoscaler busybox0 has been deleted in namespace-1634164747-11025
I1013 22:39:27.547852   56856 horizontal.go:361] Horizontal Pod Autoscaler busybox1 has been deleted in namespace-1634164747-11025
core.sh:1489: Successful get namespaces/quotas {{.metadata.name}}: quotas
core.sh:1490: Successful get quota --namespace=quotas {{range.items}}{{ if eq .metadata.name \"test-quota\" }}found{{end}}{{end}}:: :
... skipping 23 lines ...
core.sh:1523: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1013 22:39:37.331303   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164518-27178
I1013 22:39:37.340672   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164515-28517
I1013 22:39:37.357292   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164516-2134
core.sh:1525: Successful get pods -n other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
Successful
message:error: a resource cannot be retrieved by name across all namespaces
has:a resource cannot be retrieved by name across all namespaces
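The error above is kubectl refusing a by-name get combined with --all-namespaces, since a name is only unique within one namespace. A minimal sketch:

  kubectl get pods valid-pod --all-namespaces   # rejected
  kubectl get pods --all-namespaces             # list form is fine
  kubectl get pods valid-pod --namespace=other  # by name within one namespace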
core.sh:1532: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1013 22:39:37.644290   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164531-25532
I1013 22:39:37.688297   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164554-27745
warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
I1013 22:39:37.740061   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164550-15971
... skipping 21 lines ...
I1013 22:39:40.193340   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164685-25572
I1013 22:39:40.226616   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164685-31064
I1013 22:39:40.353318   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164696-9883
I1013 22:39:40.634330   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164706-13409
I1013 22:39:40.797823   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164715-22196
I1013 22:39:40.861344   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164707-5347
E1013 22:39:40.897927   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1013 22:39:40.937088   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164715-29413
I1013 22:39:40.937107   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164717-17471
I1013 22:39:41.277673   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164719-18180
I1013 22:39:41.288005   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164721-2905
I1013 22:39:41.395494   56856 namespace_controller.go:185] Namespace has been deleted quotas
I1013 22:39:41.523561   56856 namespace_controller.go:185] Namespace has been deleted namespace-1634164747-11025
E1013 22:39:42.242051   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E1013 22:39:42.282424   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
+++ exit code: 0
Recording: run_secrets_test
Running command: run_secrets_test

+++ Running case: test-cmd.run_secrets_test 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
... skipping 103 lines ...
core.sh:42: Successful get configmaps {{range.items}}{{ if eq .metadata.name \"test-binary-configmap\" }}found{{end}}{{end}}:: :
(Bconfigmap/test-configmap created (dry run)
configmap/test-configmap created (server dry run)
core.sh:46: Successful get configmaps {{range.items}}{{ if eq .metadata.name \"test-configmap\" }}found{{end}}{{end}}:: :
configmap/test-configmap created
configmap/test-binary-configmap created
E1013 22:39:54.384020   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
core.sh:51: Successful get configmap/test-configmap --namespace=test-configmaps {{.metadata.name}}: test-configmap
core.sh:52: Successful get configmap/test-binary-configmap --namespace=test-configmaps {{.metadata.name}}: test-binary-configmap
query for configmaps had limit param
query for events had limit param
query for configmaps had user-specified limit param
Successful describe configmaps verbose logs:
... skipping 19 lines ...
+++ command: run_client_config_tests
+++ [1013 22:40:00] Creating namespace namespace-1634164800-20038
namespace/namespace-1634164800-20038 created
Context "test" modified.
+++ [1013 22:40:00] Testing client config
Successful
message:error: stat missing: no such file or directory
has:missing: no such file or directory
Successful
message:error: stat missing: no such file or directory
has:missing: no such file or directory
Successful
message:error: stat missing: no such file or directory
has:missing: no such file or directory
Successful
message:Error in configuration: context was not found for specified context: missing-context
has:context was not found for specified context: missing-context
Successful
message:error: no server found for cluster "missing-cluster"
has:no server found for cluster "missing-cluster"
Successful
message:error: auth info "missing-user" does not exist
has:auth info "missing-user" does not exist
Successful
message:error: error loading config file "/tmp/newconfig.yaml": no kind "Config" is registered for version "v-1" in scheme "k8s.io/client-go/tools/clientcmd/api/latest/latest.go:50"
has:error loading config file
Successful
message:error: stat missing-config: no such file or directory
has:no such file or directory
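The client-config cases above each point kubectl at something missing; a minimal sketch of invocations that would produce these messages (file and entry names hypothetical):

  kubectl get pods --kubeconfig=missing         # stat missing: no such file or directory
  kubectl get pods --context=missing-context    # context was not found
  kubectl get pods --cluster=missing-cluster    # no server found for cluster
  kubectl get pods --user=missing-user          # auth info does not exist
  kubectl get pods --kubeconfig=/tmp/newconfig.yaml   # unregistered kind/version in the config file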
+++ exit code: 0
Recording: run_service_accounts_tests
Running command: run_service_accounts_tests

+++ Running case: test-cmd.run_service_accounts_tests 
... skipping 58 lines ...
Labels:                        <none>
Annotations:                   <none>
Schedule:                      59 23 31 2 *
Concurrency Policy:            Allow
Suspend:                       False
Successful Job History Limit:  3
Failed Job History Limit:      1
Starting Deadline Seconds:     <unset>
Selector:                      <unset>
Parallelism:                   <unset>
Completions:                   <unset>
Pod Template:
  Labels:  <none>
... skipping 53 lines ...
                  job-name=test-job
Annotations:      cronjob.kubernetes.io/instantiate: manual
Parallelism:      1
Completions:      1
Completion Mode:  NonIndexed
Start Time:       Wed, 13 Oct 2021 22:40:10 +0000
Pods Statuses:    1 Running / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  controller-uid=b5e0d8e3-1b86-49ba-81c1-b2e3867d81d7
           job-name=test-job
  Containers:
   pi:
    Image:      k8s.gcr.io/perl
... skipping 25 lines ...
I1013 22:40:10.438529   76368 round_trippers.go:541] GET https://127.0.0.1:6443/apis/batch/v1/namespaces/test-jobs/jobs/test-job 200 OK in 2 milliseconds
I1013 22:40:10.446779   76368 round_trippers.go:541] GET https://127.0.0.1:6443/api/v1/namespaces/test-jobs/events?fieldSelector=involvedObject.name%3Dtest-job%2CinvolvedObject.namespace%3Dtest-jobs%2CinvolvedObject.kind%3DJob%2CinvolvedObject.uid%3Db5e0d8e3-1b86-49ba-81c1-b2e3867d81d7&limit=500 200 OK in 2 milliseconds
I1013 22:40:10.636911   56856 job_controller.go:435] enqueueing job test-jobs/test-job
E1013 22:40:10.637046   56856 tracking_utils.go:109] "deleting tracking annotation UID expectations" err="couldn't create key for object test-jobs/test-job: could not find key for obj \"test-jobs/test-job\"" job="test-jobs/test-job"
job.batch "test-job" deleted
cronjob.batch "pi" deleted
E1013 22:40:10.805502   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
namespace "test-jobs" deleted
I1013 22:40:12.745516   56856 namespace_controller.go:185] Namespace has been deleted test-service-accounts
E1013 22:40:13.456197   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
+++ exit code: 0
Recording: run_create_job_tests
Running command: run_create_job_tests

+++ Running case: test-cmd.run_create_job_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
... skipping 430 lines ...
  type: ClusterIP
status:
  loadBalancer: {}
Successful
message:kubectl-create kubectl-set
has:kubectl-set
error: you must specify resources by --filename when --local is set.
Example resource specifications include:
   '-f rsrc.yaml'
   '--filename=rsrc.json'
core.sh:1034: Successful get services redis-master {{range.spec.selector}}{{.}}:{{end}}: redis:master:backend:
service/redis-master selector updated
Successful
message:Error from server (Conflict): Operation cannot be fulfilled on services "redis-master": the object has been modified; please apply your changes to the latest version and try again
has:Conflict
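The Conflict above is optimistic concurrency working as intended: the update carried a stale resourceVersion. A minimal sketch of the usual retry, assuming a file-based edit:

  kubectl get service redis-master -o yaml > /tmp/redis-master.yaml
  # ...edit the selector in the file while the live object changes...
  kubectl replace -f /tmp/redis-master.yaml   # conflicts if the object changed meanwhile
  # re-fetch and re-apply the edit against the latest resourceVersion to resolve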
core.sh:1047: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:
(Bservice "redis-master" deleted
core.sh:1054: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
core.sh:1058: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:
service/redis-master created
... skipping 108 lines ...
daemonset.apps/bind created
apps.sh:75: Successful get controllerrevisions {{range.items}}{{.metadata.annotations}}:{{end}}: map[deprecated.daemonset.template.generation:1 kubectl.kubernetes.io/last-applied-configuration:{"apiVersion":"apps/v1","kind":"DaemonSet","metadata":{"annotations":{"kubernetes.io/change-cause":"kubectl apply --filename=hack/testdata/rollingupdate-daemonset.yaml --record=true --server=https://127.0.0.1:6443 --insecure-skip-tls-verify=true --match-server-version=true"},"labels":{"service":"bind"},"name":"bind","namespace":"namespace-1634164831-18038"},"spec":{"selector":{"matchLabels":{"service":"bind"}},"template":{"metadata":{"labels":{"service":"bind"}},"spec":{"affinity":{"podAntiAffinity":{"requiredDuringSchedulingIgnoredDuringExecution":[{"labelSelector":{"matchExpressions":[{"key":"service","operator":"In","values":["bind"]}]},"namespaces":[],"topologyKey":"kubernetes.io/hostname"}]}},"containers":[{"image":"k8s.gcr.io/pause:2.0","name":"kubernetes-pause"}]}},"updateStrategy":{"rollingUpdate":{"maxUnavailable":"10%"},"type":"RollingUpdate"}}}
 kubernetes.io/change-cause:kubectl apply --filename=hack/testdata/rollingupdate-daemonset.yaml --record=true --server=https://127.0.0.1:6443 --insecure-skip-tls-verify=true --match-server-version=true]:
daemonset.apps/bind skipped rollback (current template already matches revision 1)
apps.sh:78: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
apps.sh:79: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
E1013 22:40:31.975839   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
Flag --record has been deprecated, --record will be removed in the future
daemonset.apps/bind configured
apps.sh:82: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:latest:
apps.sh:83: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
apps.sh:84: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
apps.sh:85: Successful get controllerrevisions {{range.items}}{{.metadata.annotations}}:{{end}}: map[deprecated.daemonset.template.generation:1 kubectl.kubernetes.io/last-applied-configuration:{"apiVersion":"apps/v1","kind":"DaemonSet","metadata":{"annotations":{"kubernetes.io/change-cause":"kubectl apply --filename=hack/testdata/rollingupdate-daemonset.yaml --record=true --server=https://127.0.0.1:6443 --insecure-skip-tls-verify=true --match-server-version=true"},"labels":{"service":"bind"},"name":"bind","namespace":"namespace-1634164831-18038"},"spec":{"selector":{"matchLabels":{"service":"bind"}},"template":{"metadata":{"labels":{"service":"bind"}},"spec":{"affinity":{"podAntiAffinity":{"requiredDuringSchedulingIgnoredDuringExecution":[{"labelSelector":{"matchExpressions":[{"key":"service","operator":"In","values":["bind"]}]},"namespaces":[],"topologyKey":"kubernetes.io/hostname"}]}},"containers":[{"image":"k8s.gcr.io/pause:2.0","name":"kubernetes-pause"}]}},"updateStrategy":{"rollingUpdate":{"maxUnavailable":"10%"},"type":"RollingUpdate"}}}
... skipping 15 lines ...
apps.sh:90: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
apps.sh:91: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
daemonset.apps/bind rolled back
apps.sh:94: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
apps.sh:95: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
Successful
message:error: unable to find specified revision 1000000 in history
has:unable to find specified revision
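The rollback checks above flip the daemonset between its recorded template revisions; a minimal sketch, assuming --to-revision is what selects them:

  kubectl rollout undo daemonset/bind                        # back to the previous revision
  kubectl rollout undo daemonset/bind --to-revision=1000000  # fails: no such revision in history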
apps.sh:99: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
apps.sh:100: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
daemonset.apps/bind rolled back
apps.sh:103: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:latest:
apps.sh:104: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
... skipping 36 lines ...
Namespace:    namespace-1634164834-10562
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
Namespace:    namespace-1634164834-10562
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 18 lines ...
Namespace:    namespace-1634164834-10562
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 12 lines ...
Namespace:    namespace-1634164834-10562
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 27 lines ...
Namespace:    namespace-1634164834-10562
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
Namespace:    namespace-1634164834-10562
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
Namespace:    namespace-1634164834-10562
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 11 lines ...
Namespace:    namespace-1634164834-10562
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 25 lines ...
core.sh:1240: Successful get rc frontend {{.spec.replicas}}: 3
replicationcontroller/frontend scaled
E1013 22:40:37.703077   56856 replica_set.go:205] ReplicaSet has no controller: &ReplicaSet{ObjectMeta:{frontend  namespace-1634164834-10562  e3547a16-4446-472c-bdda-d2a0b63befbd 2080 2 2021-10-13 22:40:36 +0000 UTC <nil> <nil> map[app:guestbook tier:frontend] map[] [] []  [{kubectl Update v1 <nil> FieldsV1 {"f:spec":{"f:replicas":{}}} scale} {kube-controller-manager Update v1 2021-10-13 22:40:36 +0000 UTC FieldsV1 {"f:status":{"f:fullyLabeledReplicas":{},"f:observedGeneration":{},"f:replicas":{}}} status} {kubectl-create Update v1 2021-10-13 22:40:36 +0000 UTC FieldsV1 {"f:metadata":{"f:labels":{".":{},"f:app":{},"f:tier":{}}},"f:spec":{"f:selector":{},"f:template":{".":{},"f:metadata":{".":{},"f:creationTimestamp":{},"f:labels":{".":{},"f:app":{},"f:tier":{}}},"f:spec":{".":{},"f:containers":{".":{},"k:{\"name\":\"php-redis\"}":{".":{},"f:env":{".":{},"k:{\"name\":\"GET_HOSTS_FROM\"}":{".":{},"f:name":{},"f:value":{}}},"f:image":{},"f:imagePullPolicy":{},"f:name":{},"f:ports":{".":{},"k:{\"containerPort\":80,\"protocol\":\"TCP\"}":{".":{},"f:containerPort":{},"f:protocol":{}}},"f:resources":{".":{},"f:requests":{".":{},"f:cpu":{},"f:memory":{}}},"f:terminationMessagePath":{},"f:terminationMessagePolicy":{}}},"f:dnsPolicy":{},"f:restartPolicy":{},"f:schedulerName":{},"f:securityContext":{},"f:terminationGracePeriodSeconds":{}}}}} }]},Spec:ReplicaSetSpec{Replicas:*2,Selector:&v1.LabelSelector{MatchLabels:map[string]string{app: guestbook,tier: frontend,},MatchExpressions:[]LabelSelectorRequirement{},},Template:{{      0 0001-01-01 00:00:00 +0000 UTC <nil> <nil> map[app:guestbook tier:frontend] map[] [] []  []} {[] [] [{php-redis gcr.io/google_samples/gb-frontend:v4 [] []  [{ 0 80 TCP }] [] [{GET_HOSTS_FROM dns nil}] {map[] map[cpu:{{100 -3} {<nil>} 100m DecimalSI} memory:{{104857600 0} {<nil>} 100Mi BinarySI}]} [] [] nil nil nil nil /dev/termination-log File IfNotPresent nil false false false}] [] Always 0xc002ea7618 <nil> ClusterFirst map[]   <nil>  false false false <nil> PodSecurityContext{SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,SupplementalGroups:[],FSGroup:nil,RunAsGroup:nil,Sysctls:[]Sysctl{},WindowsOptions:nil,FSGroupChangePolicy:nil,SeccompProfile:nil,} []   nil default-scheduler [] []  <nil> nil [] <nil> <nil> <nil> map[] [] <nil>}},MinReadySeconds:0,},Status:ReplicaSetStatus{Replicas:3,FullyLabeledReplicas:3,ObservedGeneration:1,ReadyReplicas:0,AvailableReplicas:0,Conditions:[]ReplicaSetCondition{},},}
I1013 22:40:37.767067   56856 event.go:294] "Event occurred" object="namespace-1634164834-10562/frontend" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulDelete" message="Deleted pod: frontend-55tn6"
core.sh:1244: Successful get rc frontend {{.spec.replicas}}: 2
core.sh:1248: Successful get rc frontend {{.spec.replicas}}: 2
error: Expected replicas to be 3, was 2
core.sh:1252: Successful get rc frontend {{.spec.replicas}}: 2
core.sh:1256: Successful get rc frontend {{.spec.replicas}}: 2
replicationcontroller/frontend scaled
I1013 22:40:38.382115   56856 event.go:294] "Event occurred" object="namespace-1634164834-10562/frontend" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-jhlql"
core.sh:1260: Successful get rc frontend {{.spec.replicas}}: 3
core.sh:1264: Successful get rc frontend {{.spec.replicas}}: 3
... skipping 31 lines ...
deployment.apps "nginx-deployment" deleted
Successful
message:service/expose-test-deployment exposed
has:service/expose-test-deployment exposed
service "expose-test-deployment" deleted
Successful
message:error: couldn't retrieve selectors via --selector flag or introspection: invalid deployment: no selectors, therefore cannot be exposed
See 'kubectl expose -h' for help and examples
has:invalid deployment: no selectors
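A hedged sketch of the two expose cases above: kubectl expose copies the workload's selector onto the new Service, so a fixture deployment carrying no selectors has nothing to copy (names taken from the log, flags illustrative):

  # works: spec.selector becomes the Service selector
  kubectl expose deployment nginx-deployment --port=80 --name=expose-test-deployment
  # a deployment without selectors fails with
  # "couldn't retrieve selectors via --selector flag or introspection"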
deployment.apps/nginx-deployment created
I1013 22:40:41.822277   56856 event.go:294] "Event occurred" object="namespace-1634164834-10562/nginx-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-76b5cd66f5 to 3"
I1013 22:40:41.844865   56856 event.go:294] "Event occurred" object="namespace-1634164834-10562/nginx-deployment-76b5cd66f5" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-76b5cd66f5-v8jjs"
I1013 22:40:41.921485   56856 event.go:294] "Event occurred" object="namespace-1634164834-10562/nginx-deployment-76b5cd66f5" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-76b5cd66f5-9xc9t"
... skipping 20 lines ...
(Bpod "valid-pod" deleted
service "frontend" deleted
service "frontend-2" deleted
service "frontend-3" deleted
service "frontend-4" deleted
Successful
message:error: cannot expose a Node
has:cannot expose
Successful
message:The Service "invalid-large-service-name-that-has-more-than-sixty-three-characters" is invalid: metadata.name: Invalid value: "invalid-large-service-name-that-has-more-than-sixty-three-characters": must be no more than 63 characters
has:metadata.name: Invalid value
Successful
message:service/kubernetes-serve-hostname-testing-sixty-three-characters-in-len exposed
... skipping 30 lines ...
horizontalpodautoscaler.autoscaling/frontend autoscaled
core.sh:1403: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 70
(Bhorizontalpodautoscaler.autoscaling "frontend" deleted
horizontalpodautoscaler.autoscaling/frontend autoscaled
core.sh:1407: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 2 3 80
(Bhorizontalpodautoscaler.autoscaling "frontend" deleted
E1013 22:40:48.401123   80002 run.go:120] "command failed" err="required flag(s) \"max\" not set"
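A sketch of the autoscale commands these checks exercise, using the values asserted at core.sh:1407; --max is a required flag, which is what the "command failed" line above reports:

  # creates an HPA with minReplicas=2, maxReplicas=3, target CPU 80%
  kubectl autoscale rc frontend --min=2 --max=3 --cpu-percent=80
  # omitting --max fails: required flag(s) "max" not set
  kubectl autoscale rc frontend --min=2 --cpu-percent=80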
replicationcontroller "frontend" deleted
core.sh:1416: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
apiVersion: apps/v1
kind: Deployment
metadata:
  creationTimestamp: null
... skipping 24 lines ...
          limits:
            cpu: 300m
          requests:
            cpu: 300m
      terminationGracePeriodSeconds: 0
status: {}
Error from server (NotFound): deployments.apps "nginx-deployment-resources" not found
E1013 22:40:48.883473   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
deployment.apps/nginx-deployment-resources created
I1013 22:40:49.104025   56856 event.go:294] "Event occurred" object="namespace-1634164834-10562/nginx-deployment-resources" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-resources-748ddcb48b to 3"
I1013 22:40:49.117354   56856 event.go:294] "Event occurred" object="namespace-1634164834-10562/nginx-deployment-resources-748ddcb48b" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-748ddcb48b-g8q8g"
I1013 22:40:49.130339   56856 event.go:294] "Event occurred" object="namespace-1634164834-10562/nginx-deployment-resources-748ddcb48b" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-748ddcb48b-ffncx"
I1013 22:40:49.130683   56856 event.go:294] "Event occurred" object="namespace-1634164834-10562/nginx-deployment-resources-748ddcb48b" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-748ddcb48b-5xwv8"
core.sh:1422: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx-deployment-resources:
core.sh:1423: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
core.sh:1424: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/perl:
deployment.apps/nginx-deployment-resources resource requirements updated
I1013 22:40:49.578261   56856 event.go:294] "Event occurred" object="namespace-1634164834-10562/nginx-deployment-resources" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-resources-7bfb7d56b6 to 1"
I1013 22:40:49.589768   56856 event.go:294] "Event occurred" object="namespace-1634164834-10562/nginx-deployment-resources-7bfb7d56b6" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-7bfb7d56b6-k95wl"
core.sh:1427: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).resources.limits.cpu}}:{{end}}: 100m:
core.sh:1428: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.limits.cpu}}:{{end}}: 100m:
error: unable to find container named redis
deployment.apps/nginx-deployment-resources resource requirements updated
I1013 22:40:50.033753   56856 event.go:294] "Event occurred" object="namespace-1634164834-10562/nginx-deployment-resources" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled down replica set nginx-deployment-resources-748ddcb48b to 2"
I1013 22:40:50.100179   56856 event.go:294] "Event occurred" object="namespace-1634164834-10562/nginx-deployment-resources" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-resources-75dbcccf44 to 1"
core.sh:1433: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).resources.limits.cpu}}:{{end}}: 200m:
I1013 22:40:50.116070   56856 event.go:294] "Event occurred" object="namespace-1634164834-10562/nginx-deployment-resources-748ddcb48b" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulDelete" message="Deleted pod: nginx-deployment-resources-748ddcb48b-g8q8g"
I1013 22:40:50.117598   56856 event.go:294] "Event occurred" object="namespace-1634164834-10562/nginx-deployment-resources-75dbcccf44" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-resources-75dbcccf44-gztsz"
... skipping 155 lines ...
    status: "True"
    type: Progressing
  observedGeneration: 4
  replicas: 4
  unavailableReplicas: 4
  updatedReplicas: 1
error: you must specify resources by --filename when --local is set.
Example resource specifications include:
   '-f rsrc.yaml'
   '--filename=rsrc.json'
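A hedged sketch of the --local behavior shown above (file and deployment names illustrative): --local renders the change client-side, so the object must be supplied via --filename rather than fetched from the server.

  # fails: no object to operate on locally
  kubectl set resources deployment nginx-deployment-resources --local --limits=cpu=200m -o yaml
  # works: reads the object from the file and never contacts the server
  kubectl set resources -f rsrc.yaml --local --limits=cpu=200m -o yaml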
core.sh:1444: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).resources.limits.cpu}}:{{end}}: 200m:
core.sh:1445: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.limits.cpu}}:{{end}}: 300m:
core.sh:1446: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.requests.cpu}}:{{end}}: 300m:
... skipping 3 lines ...
Running command: run_deployment_tests

+++ Running case: test-cmd.run_deployment_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_deployment_tests
+++ [1013 22:40:51] Creating namespace namespace-1634164851-9460
E1013 22:40:51.712464   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
namespace/namespace-1634164851-9460 created
Context "test" modified.
+++ [1013 22:40:51] Testing deployments
deployment.apps/test-nginx-extensions created
I1013 22:40:51.888521   56856 event.go:294] "Event occurred" object="namespace-1634164851-9460/test-nginx-extensions" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set test-nginx-extensions-558656bd4b to 1"
I1013 22:40:51.917404   56856 event.go:294] "Event occurred" object="namespace-1634164851-9460/test-nginx-extensions-558656bd4b" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: test-nginx-extensions-558656bd4b-dfjbx"
... skipping 31 lines ...
                pod-template-hash=69dd6dcd84
Annotations:    deployment.kubernetes.io/desired-replicas: 1
                deployment.kubernetes.io/max-replicas: 2
                deployment.kubernetes.io/revision: 1
Controlled By:  Deployment/test-nginx-apps
Replicas:       1 current / 1 desired
Pods Status:    0 Running / 1 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=test-nginx-apps
           pod-template-hash=69dd6dcd84
  Containers:
   nginx:
    Image:        k8s.gcr.io/nginx:test-cmd
... skipping 118 lines ...
apps.sh:308: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
Warning: resource deployments/nginx is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
deployment.apps/nginx configured
I1013 22:40:59.679335   56856 event.go:294] "Event occurred" object="namespace-1634164851-9460/nginx" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-8666979fc8 to 1"
I1013 22:40:59.755436   56856 event.go:294] "Event occurred" object="namespace-1634164851-9460/nginx-8666979fc8" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-8666979fc8-ggxzl"
apps.sh:311: Successful get deployment.apps {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
E1013 22:40:59.969306   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
    Image:	k8s.gcr.io/nginx:test-cmd
deployment.apps/nginx rolled back (server dry run)
apps.sh:315: Successful get deployment.apps {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
deployment.apps/nginx rolled back
apps.sh:319: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
error: unable to find specified revision 1000000 in history
apps.sh:322: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
deployment.apps/nginx rolled back
I1013 22:41:02.794722   56856 horizontal.go:361] Horizontal Pod Autoscaler frontend has been deleted in namespace-1634164834-10562
apps.sh:326: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
deployment.apps/nginx paused
error: you cannot rollback a paused deployment; resume it first with 'kubectl rollout resume deployment/nginx' and try again
error: deployments.apps "nginx" can't restart paused deployment (run rollout resume first)
deployment.apps/nginx resumed
deployment.apps/nginx rolled back
    deployment.kubernetes.io/revision-history: 1,3
error: desired revision (3) is different from the running revision (5)
deployment.apps/nginx restarted
I1013 22:41:04.105259   56856 event.go:294] "Event occurred" object="namespace-1634164851-9460/nginx" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled down replica set nginx-54785cbcb8 to 2"
I1013 22:41:04.179905   56856 event.go:294] "Event occurred" object="namespace-1634164851-9460/nginx-54785cbcb8" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulDelete" message="Deleted pod: nginx-54785cbcb8-r49pd"
I1013 22:41:04.181196   56856 event.go:294] "Event occurred" object="namespace-1634164851-9460/nginx" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-57df976b84 to 1"
I1013 22:41:04.192460   56856 event.go:294] "Event occurred" object="namespace-1634164851-9460/nginx-57df976b84" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-57df976b84-mzgrz"
Successful
... skipping 80 lines ...
apps.sh:370: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/perl:
deployment.apps/nginx-deployment image updated
I1013 22:41:07.248223   56856 event.go:294] "Event occurred" object="namespace-1634164851-9460/nginx-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-6dd48b9849 to 1"
I1013 22:41:07.259236   56856 event.go:294] "Event occurred" object="namespace-1634164851-9460/nginx-deployment-6dd48b9849" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: nginx-deployment-6dd48b9849-h5h9g"
apps.sh:373: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
apps.sh:374: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/perl:
error: unable to find container named "redis"
deployment.apps/nginx-deployment image updated
apps.sh:379: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
apps.sh:380: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/perl:
deployment.apps/nginx-deployment image updated
apps.sh:383: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
apps.sh:384: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/perl:
... skipping 52 lines ...
deployment.apps/nginx-deployment env updated
I1013 22:41:12.881134   56856 event.go:294] "Event occurred" object="namespace-1634164851-9460/nginx-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled down replica set nginx-deployment-7f789d7c5f to 0"
deployment.apps/nginx-deployment env updated
I1013 22:41:12.903651   56856 event.go:294] "Event occurred" object="namespace-1634164851-9460/nginx-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled up replica set nginx-deployment-57ddd474c4 to 1"
I1013 22:41:12.906656   56856 horizontal.go:361] Horizontal Pod Autoscaler nginx-deployment has been deleted in namespace-1634164851-9460
Successful
message:error: standard input cannot be used for multiple arguments
has:standard input cannot be used for multiple arguments
I1013 22:41:13.068942   56856 event.go:294] "Event occurred" object="namespace-1634164851-9460/nginx-deployment" kind="Deployment" apiVersion="apps/v1" type="Normal" reason="ScalingReplicaSet" message="Scaled down replica set nginx-deployment-57ddd474c4 to 0"
I1013 22:41:13.080482   56856 event.go:294] "Event occurred" object="namespace-1634164851-9460/nginx-deployment-7f789d7c5f" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulDelete" message="Deleted pod: nginx-deployment-7f789d7c5f-w85sh"
deployment.apps "nginx-deployment" deleted
E1013 22:41:13.347666   56856 replica_set.go:536] sync "namespace-1634164851-9460/nginx-deployment-59b7fccd97" failed with replicasets.apps "nginx-deployment-59b7fccd97" not found
configmap "test-set-env-config" deleted
E1013 22:41:13.397593   56856 replica_set.go:536] sync "namespace-1634164851-9460/nginx-deployment-7584fc66fd" failed with Operation cannot be fulfilled on replicasets.apps "nginx-deployment-7584fc66fd": StorageError: invalid object, Code: 4, Key: /registry/replicasets/namespace-1634164851-9460/nginx-deployment-7584fc66fd, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 8550c95c-8095-4a86-ad90-eeaabd272544, UID in object meta: 
E1013 22:41:13.482639   56856 replica_set.go:536] sync "namespace-1634164851-9460/nginx-deployment-57ddd474c4" failed with Operation cannot be fulfilled on replicasets.apps "nginx-deployment-57ddd474c4": StorageError: invalid object, Code: 4, Key: /registry/replicasets/namespace-1634164851-9460/nginx-deployment-57ddd474c4, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 20e7a1db-f450-46c1-84af-f83bb6acdc38, UID in object meta: 
secret "test-set-env-secret" deleted
E1013 22:41:13.496510   56856 replica_set.go:536] sync "namespace-1634164851-9460/nginx-deployment-7f789d7c5f" failed with replicasets.apps "nginx-deployment-7f789d7c5f" not found
+++ exit code: 0
Recording: run_rs_tests
Running command: run_rs_tests

+++ Running case: test-cmd.run_rs_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_rs_tests
+++ [1013 22:41:13] Creating namespace namespace-1634164873-27469
namespace/namespace-1634164873-27469 created
Context "test" modified.
+++ [1013 22:41:13] Testing kubectl(v1:replicasets)
apps.sh:553: Successful get rs {{range.items}}{{.metadata.name}}:{{end}}: 
E1013 22:41:14.018316   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
replicaset.apps/frontend created
+++ [1013 22:41:14] Deleting rs
I1013 22:41:14.097491   56856 event.go:294] "Event occurred" object="namespace-1634164873-27469/frontend" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-lhdt4"
I1013 22:41:14.115234   56856 event.go:294] "Event occurred" object="namespace-1634164873-27469/frontend" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-qlczl"
I1013 22:41:14.115458   56856 event.go:294] "Event occurred" object="namespace-1634164873-27469/frontend" kind="ReplicaSet" apiVersion="apps/v1" type="Normal" reason="SuccessfulCreate" message="Created pod: frontend-kwk48"
replicaset.apps "frontend" deleted
... skipping 30 lines ...
Namespace:    namespace-1634164873-27469
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 17 lines ...
Namespace:    namespace-1634164873-27469
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 18 lines ...
Namespace:    namespace-1634164873-27469
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 12 lines ...
Namespace:    namespace-1634164873-27469
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 25 lines ...
Namespace:    namespace-1634164873-27469
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 17 lines ...
Namespace:    namespace-1634164873-27469
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 17 lines ...
Namespace:    namespace-1634164873-27469
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 11 lines ...
Namespace:    namespace-1634164873-27469
Selector:     app=guestbook,tier=frontend
Labels:       app=guestbook
              tier=frontend
Annotations:  <none>
Replicas:     3 current / 3 desired
Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=guestbook
           tier=frontend
  Containers:
   php-redis:
    Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 224 lines ...
horizontalpodautoscaler.autoscaling/frontend autoscaled
apps.sh:716: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 2 3 80
Successful
message:kubectl-autoscale
has:kubectl-autoscale
horizontalpodautoscaler.autoscaling "frontend" deleted
E1013 22:41:26.668758   83683 run.go:120] "command failed" err="required flag(s) \"max\" not set"
replicaset.apps "frontend" deleted
+++ exit code: 0
Recording: run_stateful_set_tests
Running command: run_stateful_set_tests

+++ Running case: test-cmd.run_stateful_set_tests 
... skipping 35 lines ...
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_statefulset_history_tests
+++ [1013 22:41:28] Creating namespace namespace-1634164888-7614
namespace/namespace-1634164888-7614 created
Context "test" modified.
+++ [1013 22:41:28] Testing kubectl(v1:statefulsets, v1:controllerrevisions)
E1013 22:41:29.069061   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
apps.sh:456: Successful get statefulset {{range.items}}{{.metadata.name}}:{{end}}: 
Flag --record has been deprecated, --record will be removed in the future
statefulset.apps/nginx created
apps.sh:460: Successful get controllerrevisions {{range.items}}{{.metadata.annotations}}:{{end}}: map[kubectl.kubernetes.io/last-applied-configuration:{"apiVersion":"apps/v1","kind":"StatefulSet","metadata":{"annotations":{"kubernetes.io/change-cause":"kubectl apply --filename=hack/testdata/rollingupdate-statefulset.yaml --record=true --server=https://127.0.0.1:6443 --insecure-skip-tls-verify=true --match-server-version=true"},"labels":{"app":"nginx-statefulset"},"name":"nginx","namespace":"namespace-1634164888-7614"},"spec":{"replicas":0,"selector":{"matchLabels":{"app":"nginx-statefulset"}},"serviceName":"nginx","template":{"metadata":{"labels":{"app":"nginx-statefulset"}},"spec":{"containers":[{"command":["sh","-c","while true; do sleep 1; done"],"image":"k8s.gcr.io/nginx-slim:0.7","name":"nginx","ports":[{"containerPort":80,"name":"web"}]}],"terminationGracePeriodSeconds":5}},"updateStrategy":{"type":"RollingUpdate"}}}
 kubernetes.io/change-cause:kubectl apply --filename=hack/testdata/rollingupdate-statefulset.yaml --record=true --server=https://127.0.0.1:6443 --insecure-skip-tls-verify=true --match-server-version=true]:
statefulset.apps/nginx skipped rollback (current template already matches revision 1)
... skipping 27 lines ...
apps.sh:475: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/pause:2.0:
apps.sh:476: Successful get statefulset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
statefulset.apps/nginx rolled back
apps.sh:479: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx-slim:0.7:
apps.sh:480: Successful get statefulset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
Successful
message:error: unable to find specified revision 1000000 in history
has:unable to find specified revision
apps.sh:484: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx-slim:0.7:
apps.sh:485: Successful get statefulset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
statefulset.apps/nginx rolled back
apps.sh:488: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx-slim:0.8:
apps.sh:489: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/pause:2.0:
... skipping 61 lines ...
Name:         mock
Namespace:    namespace-1634164893-14453
Selector:     app=mock
Labels:       app=mock
Annotations:  <none>
Replicas:     1 current / 1 desired
Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=mock
  Containers:
   mock-container:
    Image:        k8s.gcr.io/pause:3.6
    Port:         9949/TCP
... skipping 14 lines ...
generic-resources.sh:102: Successful get rc mock {{.metadata.labels.status}}: replaced
service/mock edited
replicationcontroller/mock edited
generic-resources.sh:114: Successful get services mock {{.metadata.labels.status}}: edited
generic-resources.sh:120: Successful get rc mock {{.metadata.labels.status}}: edited
service/mock labeled
E1013 22:41:35.754892   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
replicationcontroller/mock labeled
generic-resources.sh:134: Successful get services mock {{.metadata.labels.labeled}}: true
generic-resources.sh:140: Successful get rc mock {{.metadata.labels.labeled}}: true
service/mock annotated
replicationcontroller/mock annotated
generic-resources.sh:153: Successful get services mock {{.metadata.annotations.annotated}}: true
... skipping 33 lines ...
Name:         mock
Namespace:    namespace-1634164893-14453
Selector:     app=mock
Labels:       app=mock
Annotations:  <none>
Replicas:     1 current / 1 desired
Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=mock
  Containers:
   mock-container:
    Image:        k8s.gcr.io/pause:3.6
    Port:         9949/TCP
... skipping 29 lines ...
Testing with file hack/testdata/multi-resource-json.json and replace with file hack/testdata/multi-resource-json-modify.json
generic-resources.sh:63: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: 
generic-resources.sh:64: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
service/mock created
replicationcontroller/mock created
I1013 22:41:40.387974   56856 event.go:294] "Event occurred" object="namespace-1634164893-14453/mock" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: mock-zbk9p"
E1013 22:41:40.455567   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
generic-resources.sh:72: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: mock:
generic-resources.sh:80: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: mock:
NAME           TYPE        CLUSTER-IP   EXTERNAL-IP   PORT(S)   AGE
service/mock   ClusterIP   10.0.0.147   <none>        99/TCP    0s

NAME                         DESIRED   CURRENT   READY   AGE
... skipping 19 lines ...
Name:         mock
Namespace:    namespace-1634164893-14453
Selector:     app=mock
Labels:       app=mock
Annotations:  <none>
Replicas:     1 current / 1 desired
Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=mock
  Containers:
   mock-container:
    Image:        k8s.gcr.io/pause:3.6
    Port:         9949/TCP
... skipping 41 lines ...
Namespace:    namespace-1634164893-14453
Selector:     app=mock
Labels:       app=mock
              status=replaced
Annotations:  <none>
Replicas:     1 current / 1 desired
Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=mock
  Containers:
   mock-container:
    Image:        k8s.gcr.io/pause:3.6
    Port:         9949/TCP
... skipping 11 lines ...
Namespace:    namespace-1634164893-14453
Selector:     app=mock2
Labels:       app=mock2
              status=replaced
Annotations:  <none>
Replicas:     1 current / 1 desired
Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
Pod Template:
  Labels:  app=mock2
  Containers:
   mock-container:
    Image:        k8s.gcr.io/pause:3.6
    Port:         9949/TCP
... skipping 112 lines ...
+++ [1013 22:41:50] Testing persistent volumes
storage.sh:30: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: 
persistentvolume/pv0001 created
storage.sh:33: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0001:
persistentvolume "pv0001" deleted
persistentvolume/pv0002 created
E1013 22:41:50.943834   56856 pv_protection_controller.go:118] PV pv0002 failed with : Operation cannot be fulfilled on persistentvolumes "pv0002": the object has been modified; please apply your changes to the latest version and try again
storage.sh:36: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0002:
(Bpersistentvolume "pv0002" deleted
persistentvolume/pv0003 created
E1013 22:41:51.377465   56856 pv_protection_controller.go:118] PV pv0003 failed with : Operation cannot be fulfilled on persistentvolumes "pv0003": the object has been modified; please apply your changes to the latest version and try again
E1013 22:41:51.403451   56856 pv_protection_controller.go:118] PV pv0003 failed with : Operation cannot be fulfilled on persistentvolumes "pv0003": the object has been modified; please apply your changes to the latest version and try again
storage.sh:39: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0003:
query for persistentvolumes had limit param
query for events had limit param
query for persistentvolumes had user-specified limit param
Successful describe persistentvolumes verbose logs:
I1013 22:41:51.544511   86449 loader.go:372] Config loaded from file:  /tmp/tmp.yBKjjCvlKb/.kube/config
... skipping 101 lines ...
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
                    save-managers: true
CreationTimestamp:  Wed, 13 Oct 2021 22:35:13 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 31 lines ...
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
                    save-managers: true
CreationTimestamp:  Wed, 13 Oct 2021 22:35:13 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 32 lines ...
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
                    save-managers: true
CreationTimestamp:  Wed, 13 Oct 2021 22:35:13 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 31 lines ...
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
                    save-managers: true
CreationTimestamp:  Wed, 13 Oct 2021 22:35:13 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 39 lines ...
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
                    save-managers: true
CreationTimestamp:  Wed, 13 Oct 2021 22:35:13 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 31 lines ...
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
                    save-managers: true
CreationTimestamp:  Wed, 13 Oct 2021 22:35:13 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 31 lines ...
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
                    save-managers: true
CreationTimestamp:  Wed, 13 Oct 2021 22:35:13 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 30 lines ...
Labels:             <none>
Annotations:        node.alpha.kubernetes.io/ttl: 0
                    save-managers: true
CreationTimestamp:  Wed, 13 Oct 2021 22:35:13 +0000
Taints:             node.kubernetes.io/unreachable:NoSchedule
Unschedulable:      false
Lease:              Failed to get lease: leases.coordination.k8s.io "127.0.0.1" not found
Conditions:
  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason                   Message
  ----             ------    -----------------                 ------------------                ------                   -------
  Ready            Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  MemoryPressure   Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
  DiskPressure     Unknown   Wed, 13 Oct 2021 22:35:13 +0000   Wed, 13 Oct 2021 22:36:16 +0000   NodeStatusNeverUpdated   Kubelet never posted node status.
... skipping 149 lines ...
yes
has:the server doesn't have a resource type
Successful
message:yes
has:yes
Successful
message:error: --subresource can not be used with NonResourceURL
has:subresource can not be used with NonResourceURL
Successful
Successful
message:yes
0
has:0
Successful
message:0
has:0
E1013 22:42:01.945930   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
Successful
message:yes
has not:Warning
Successful
message:Warning: the server doesn't have a resource type 'foo'
yes
... skipping 50 lines ...
		{Verbs:[get list watch] APIGroups:[] Resources:[configmaps] ResourceNames:[] NonResourceURLs:[]}
legacy-script.sh:848: Successful get rolebindings -n some-other-random -l test-cmd=auth {{range.items}}{{.metadata.name}}:{{end}}: testing-RB:
legacy-script.sh:849: Successful get roles -n some-other-random -l test-cmd=auth {{range.items}}{{.metadata.name}}:{{end}}: testing-R:
legacy-script.sh:850: Successful get clusterrolebindings -l test-cmd=auth {{range.items}}{{.metadata.name}}:{{end}}: testing-CRB:
legacy-script.sh:851: Successful get clusterroles -l test-cmd=auth {{range.items}}{{.metadata.name}}:{{end}}: testing-CR:
Successful
message:error: only rbac.authorization.k8s.io/v1 is supported: not *v1beta1.ClusterRole
has:only rbac.authorization.k8s.io/v1 is supported
rolebinding.rbac.authorization.k8s.io "testing-RB" deleted
role.rbac.authorization.k8s.io "testing-R" deleted
warning: deleting cluster-scoped resources, not scoped to the provided namespace
clusterrole.rbac.authorization.k8s.io "testing-CR" deleted
clusterrolebinding.rbac.authorization.k8s.io "testing-CRB" deleted
... skipping 24 lines ...
discovery.sh:91: Successful get all -l'app=cassandra' {{range.items}}{{range .metadata.labels}}{{.}}:{{end}}{{end}}: cassandra:cassandra:cassandra:cassandra:
(BI1013 22:42:05.780481   56856 event.go:294] "Event occurred" object="namespace-1634164924-22204/cassandra" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: cassandra-8dmtq"
pod "cassandra-8qcn6" deleted
pod "cassandra-njvk6" deleted
I1013 22:42:05.872382   56856 event.go:294] "Event occurred" object="namespace-1634164924-22204/cassandra" kind="ReplicationController" apiVersion="v1" type="Normal" reason="SuccessfulCreate" message="Created pod: cassandra-4nsgv"
replicationcontroller "cassandra" deleted
E1013 22:42:05.921863   56856 replica_set.go:536] sync "namespace-1634164924-22204/cassandra" failed with replicationcontrollers "cassandra" not found
service "cassandra" deleted
+++ exit code: 0
Recording: run_kubectl_explain_tests
Running command: run_kubectl_explain_tests

+++ Running case: test-cmd.run_kubectl_explain_tests 
... skipping 230 lines ...
sorted-pod3   1s
has not:Table
get.sh:341: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: sorted-pod1:sorted-pod2:sorted-pod3:
warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
pod "sorted-pod1" force deleted
pod "sorted-pod2" force deleted
E1013 22:42:10.477902   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
pod "sorted-pod3" force deleted
get.sh:345: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
+++ exit code: 0
Recording: run_kubectl_all_namespace_tests
Running command: run_kubectl_all_namespace_tests

... skipping 134 lines ...
namespace-1634164912-1404    default   0         20s
namespace-1634164924-22204   default   0         8s
some-other-random            default   0         9s
has:all-ns-test-2
namespace "all-ns-test-1" deleted
namespace "all-ns-test-2" deleted
E1013 22:42:22.176557   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
I1013 22:42:22.439143   56856 namespace_controller.go:185] Namespace has been deleted all-ns-test-1
get.sh:392: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
pod "valid-pod" force deleted
get.sh:396: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
get.sh:400: Successful get nodes {{range.items}}{{.metadata.name}}:{{end}}: 127.0.0.1:
... skipping 10 lines ...
message:Warning: policy/v1beta1 PodSecurityPolicy is deprecated in v1.21+, unavailable in v1.25+
No resources found
has:PodSecurityPolicy is deprecated
Successful
message:Warning: policy/v1beta1 PodSecurityPolicy is deprecated in v1.21+, unavailable in v1.25+
No resources found
E1013 22:42:23.632732   89412 run.go:120] "command failed" err="1 warning received"
has:PodSecurityPolicy is deprecated
Successful
message:Warning: policy/v1beta1 PodSecurityPolicy is deprecated in v1.21+, unavailable in v1.25+
No resources found
E1013 22:42:23.632732   89412 run.go:120] "command failed" err="1 warning received"
has:err="1 warning received"
Recording: run_template_output_tests
Running command: run_template_output_tests

+++ Running case: test-cmd.run_template_output_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
... skipping 444 lines ...
    }
}
certificate.sh:51: Successful get csr/foo {{range.status.conditions}}{{.type}}{{end}}: Denied
(Bcertificatesigningrequest.certificates.k8s.io "foo" deleted
certificate.sh:53: Successful get csr {{range.items}}{{.metadata.name}}{{end}}: 
certificatesigningrequest.certificates.k8s.io/foo created
E1013 22:42:34.011230   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
certificate.sh:56: Successful get csr/foo {{range.status.conditions}}{{.type}}{{end}}: 
certificatesigningrequest.certificates.k8s.io/foo denied
{
    "apiVersion": "v1",
    "items": [
        {
... skipping 61 lines ...
node/127.0.0.1 tainted
node-management.sh:95: Successful get nodes 127.0.0.1 {{range .spec.taints}}{{if eq .key \"dedicated\"}}{{.key}}={{.value}}:{{.effect}}{{end}}{{end}}: 
node/127.0.0.1 tainted
node-management.sh:98: Successful get nodes 127.0.0.1 {{range .spec.taints}}{{if eq .key \"dedicated\"}}{{.key}}={{.value}}:{{.effect}}{{end}}{{end}}: dedicated=foo:PreferNoSchedule
node/127.0.0.1 untainted
node/127.0.0.1 tainted
E1013 22:42:36.241436   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
node-management.sh:103: Successful get nodes 127.0.0.1 {{range .spec.taints}}{{if eq .key \"dedicated\"}}{{.key}}={{.value}}:{{.effect}}{{end}}{{end}}: dedicated=<no value>:PreferNoSchedule
Successful
message:kubectl-create kube-controller-manager kube-controller-manager kubectl-taint
has:kubectl-taint
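The taint/untaint sequence above follows the standard add/remove syntax; a sketch with the node name and taint taken from these assertions:

  # add or overwrite a taint on the node
  kubectl taint nodes 127.0.0.1 dedicated=foo:PreferNoSchedule --overwrite
  # remove it: same key and effect, with a trailing dash
  kubectl taint nodes 127.0.0.1 dedicated:PreferNoSchedule-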
node/127.0.0.1 untainted
node/127.0.0.1 untainted
... skipping 26 lines ...
node/127.0.0.1 cordoned (server dry run)
WARNING: deleting Pods not managed by ReplicationController, ReplicaSet, Job, DaemonSet or StatefulSet: namespace-1634164954-25539/test-pod-1
evicting pod namespace-1634164954-25539/test-pod-1 (server dry run)
node/127.0.0.1 drained (server dry run)
node-management.sh:140: Successful get pods {{range .items}}{{.metadata.name}},{{end}}: test-pod-1,test-pod-2,
WARNING: deleting Pods not managed by ReplicationController, ReplicaSet, Job, DaemonSet or StatefulSet: namespace-1634164954-25539/test-pod-1
E1013 22:42:53.186671   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E1013 22:42:59.772731   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
E1013 22:43:05.138057   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
Successful
message:node/127.0.0.1 cordoned
evicting pod namespace-1634164954-25539/test-pod-1
pod "test-pod-1" has DeletionTimestamp older than 1 seconds, skipping
node/127.0.0.1 drained
has:evicting pod .*/test-pod-1
... skipping 14 lines ...
message:node/127.0.0.1 already uncordoned (server dry run)
has:already uncordoned
node-management.sh:161: Successful get nodes 127.0.0.1 {{.spec.unschedulable}}: <no value>
node/127.0.0.1 labeled
node-management.sh:166: Successful get nodes 127.0.0.1 {{.metadata.labels.test}}: label
Successful
message:error: cannot specify both a node name and a --selector option
See 'kubectl drain -h' for help and examples
has:cannot specify both a node name
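A sketch of the two valid drain forms; the error above is the third, invalid combination (flags illustrative):

  kubectl drain 127.0.0.1                    # by node name
  kubectl drain --selector=test=label        # by label selector
  # passing both at once fails:
  # "error: cannot specify both a node name and a --selector option"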
node-management.sh:172: Successful get nodes 127.0.0.1 {{.metadata.labels.test}}: label
node-management.sh:174: Successful get nodes 127.0.0.1 {{.spec.unschedulable}}: <no value>
node-management.sh:176: Successful get pods {{range .items}}{{.metadata.name}},{{end}}: test-pod-1,test-pod-2,
Successful
... skipping 78 lines ...
WARNING: deleting Pods not managed by ReplicationController, ReplicaSet, Job, DaemonSet or StatefulSet: namespace-1634164954-25539/test-pod-1, namespace-1634164954-25539/test-pod-2
evicting pod namespace-1634164954-25539/test-pod-1 (dry run)
evicting pod namespace-1634164954-25539/test-pod-2 (dry run)
node/127.0.0.1 drained (dry run)
has:/v1/pods?fieldSelector=spec.nodeName%3D127.0.0.1&limit=500 200 OK
Successful
message:error: USAGE: cordon NODE [flags]
See 'kubectl cordon -h' for help and examples
has:error\: USAGE\: cordon NODE
node/127.0.0.1 already uncordoned
Successful
message:error: You must provide one or more resources by argument or filename.
Example resource specifications include:
   '-f rsrc.yaml'
   '--filename=rsrc.json'
   '<resource> <name>'
   '<resource>'
has:must provide one or more resources
... skipping 18 lines ...
+++ [1013 22:43:14] Testing kubectl plugins
Successful
message:The following compatible plugins are available:

test/fixtures/pkg/kubectl/plugins/version/kubectl-version
  - warning: kubectl-version overwrites existing command: "kubectl version"
error: one plugin warning was found
has:kubectl-version overwrites existing command: "kubectl version"
Successful
message:The following compatible plugins are available:

test/fixtures/pkg/kubectl/plugins/kubectl-foo
test/fixtures/pkg/kubectl/plugins/foo/kubectl-foo
  - warning: test/fixtures/pkg/kubectl/plugins/foo/kubectl-foo is overshadowed by a similarly named plugin: test/fixtures/pkg/kubectl/plugins/kubectl-foo
error: one plugin warning was found
has:test/fixtures/pkg/kubectl/plugins/foo/kubectl-foo is overshadowed by a similarly named plugin
Successful
message:The following compatible plugins are available:

test/fixtures/pkg/kubectl/plugins/kubectl-foo
has:plugins are available
Successful
message:Unable to read directory "test/fixtures/pkg/kubectl/plugins/empty" from your PATH: open test/fixtures/pkg/kubectl/plugins/empty: no such file or directory. Skipping...
error: unable to find any kubectl plugins in your PATH
has:unable to find any kubectl plugins in your PATH
Successful
message:I am plugin foo
has:plugin foo
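The plugin cases above rely on kubectl's discovery rule: any executable on PATH named kubectl-<name> becomes the subcommand `kubectl <name>`, with warnings when a plugin shadows a builtin or another plugin. A minimal sketch of a hypothetical plugin (the /tmp/plugins path is an assumption for illustration):

  # Any executable named kubectl-<name> on PATH is a plugin.
  mkdir -p /tmp/plugins
  printf '#!/usr/bin/env bash\necho "I am plugin foo"\n' > /tmp/plugins/kubectl-foo
  chmod +x /tmp/plugins/kubectl-foo
  PATH=/tmp/plugins:$PATH kubectl foo          # prints: I am plugin foo
  PATH=/tmp/plugins:$PATH kubectl plugin list  # discovery, with shadowing warnings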
Successful
message:I am plugin bar called with args test/fixtures/pkg/kubectl/plugins/bar/kubectl-bar arg1
... skipping 10 lines ...

+++ Running case: test-cmd.run_impersonation_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
+++ command: run_impersonation_tests
+++ [1013 22:43:14] Testing impersonation
Successful
message:error: requesting groups or user-extra for test-admin without impersonating a user
has:without impersonating a user
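The failure above is kubectl enforcing that impersonated groups or user-extra fields are only meaningful alongside an impersonated user. A sketch:

  kubectl get pods --as=user1                                   # impersonate a user
  kubectl get pods --as=user1 --as-group=system:authenticated   # user plus group
  kubectl get pods --as-group=system:authenticated              # rejected without --as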
certificatesigningrequest.certificates.k8s.io/foo created
authorization.sh:57: Successful get csr/foo {{.spec.username}}: user1
authorization.sh:58: Successful get csr/foo {{range .spec.groups}}{{.}}{{end}}: system:authenticated
certificatesigningrequest.certificates.k8s.io "foo" deleted
certificatesigningrequest.certificates.k8s.io/foo created
... skipping 56 lines ...
debug.sh:56: Successful get pod {{range.items}}{{.metadata.name}}:{{end}}: target:
debug.sh:57: Successful get pod/target {{(index .spec.containers 0).name}}: target
debug.sh:61: Successful get pod {{range.items}}{{.metadata.name}}:{{end}}: target:target-copy:
debug.sh:62: Successful get pod/target-copy {{(len .spec.containers)}}:{{(index .spec.containers 0).image}}: 1:busybox
pod "target" deleted
pod "target-copy" deleted
E1013 22:43:21.411478   56856 reflector.go:138] k8s.io/client-go/metadata/metadatainformer/informer.go:90: Failed to watch *v1.PartialObjectMetadata: failed to list *v1.PartialObjectMetadata: the server could not find the requested resource
+++ exit code: 0
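The debug.sh checks above verify kubectl debug's pod-copy mode: the copy (target-copy) keeps a single container whose image has been swapped to busybox. A sketch under the assumption of a simple single-container pod (the pause image is an arbitrary placeholder):

  kubectl run target --image=registry.k8s.io/pause:3.6
  # Clone the pod, replacing every container image with busybox:
  kubectl debug pod/target --copy-to=target-copy --set-image='*=busybox'
  kubectl get pod/target-copy -o jsonpath='{.spec.containers[0].image}'   # busybox
  kubectl delete pod target target-copy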
Recording: run_kubectl_debug_node_tests
Running command: run_kubectl_debug_node_tests

+++ Running case: test-cmd.run_kubectl_debug_node_tests 
+++ working dir: /home/prow/go/src/k8s.io/kubernetes
... skipping 41 lines ...
I1013 22:43:23.330901   53334 naming_controller.go:302] Shutting down NamingConditionController
I1013 22:43:23.330915   53334 establishing_controller.go:87] Shutting down EstablishingController
I1013 22:43:23.330930   53334 crd_finalizer.go:278] Shutting down CRDFinalizer
I1013 22:43:23.330956   53334 cluster_authentication_trust_controller.go:463] Shutting down cluster_authentication_trust_controller controller
I1013 22:43:23.330973   53334 customresource_discovery_controller.go:245] Shutting down DiscoveryController
I1013 22:43:23.330991   53334 autoregister_controller.go:165] Shutting down autoregister controller
W1013 22:43:23.331585   53334 clientconn.go:1326] [core] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 127.0.0.1 <nil> 0 <nil>}. Err: connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
... skipping 24 lines ...
E1013 22:43:23.335649   53334 controller.go:189] Unable to remove endpoints from kubernetes service: Get "https://127.0.0.1:6443/api/v1/namespaces/default/endpoints/kubernetes": dial tcp 127.0.0.1:6443: connect: connection refused
... skipping 38 lines ...
junit report dir: /tmp/junit-results
+++ [1013 22:43:23] Clean up complete
+ make test-integration
W1013 22:43:24.332759   53334 clientconn.go:1326] [core] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 127.0.0.1 <nil> 0 <nil>}. Err: connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
... skipping 62 lines ...
+++ [1013 22:43:27] Checking etcd is on PATH
/home/prow/go/src/k8s.io/kubernetes/third_party/etcd/etcd
+++ [1013 22:43:27] Starting etcd instance
etcd --advertise-client-urls http://127.0.0.1:2379 --data-dir /tmp/tmp.xphmVvQlbF --listen-client-urls http://127.0.0.1:2379 --log-level=debug > "/logs/artifacts/etcd.167577c2-2c74-11ec-aa07-9a22fe174cb5.root.log.DEBUG.20211013-224327.93636" 2>/dev/null
Waiting for etcd to come up.
+++ [1013 22:43:28] On try 2, etcd: : {"health":"true","reason":""}
{"header":{"cluster_id":"14841639068965178418","member_id":"10276657743932975437","revision":"2","raft_term":"2"}}+++ [1013 22:43:28] Running integration test cases
+++ [1013 22:43:33] Running tests without code coverage 
{"Time":"2021-10-13T22:46:26.221102472Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver/podlogs","Output":"ok  \tk8s.io/kubernetes/test/integration/apiserver/podlogs\t11.840s\n"}
{"Time":"2021-10-13T22:46:27.755855392Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver/tracing","Output":"ok  \tk8s.io/kubernetes/test/integration/apiserver/tracing\t12.678s\n"}
{"Time":"2021-10-13T22:46:54.405421905Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestSelfSubjectAccessReview","Output":"/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:225 +0xcc\\nnet/http.Error({0x51db438, 0xc005c845a0}, {0xc008bcbd40, 0x60}, 0x2)\\n\\t/usr/local/go/src/net/http/server.go:2058 +0x198\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.InternalError({0x51db438, 0xc005c845a0}, 0xc005c84720, {0x50e96c0, 0xc004058ae0})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/errors.go:75 +0xea\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1({0x51db438, 0xc005c845a0}, 0xc003ec2700)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/authorization.go:69 +0x39f\\nnet/http.HandlerFunc.ServeHTTP(0x7c37658, {0x51db438, 0xc005c845a0}, 0xa)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apis"}
{"Time":"2021-10-13T22:46:54.405449331Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestSelfSubjectAccessReview","Output":"filters/priority-and-fairness.go:269 +0x118\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol.(*configController).Handle.func2()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/apf_filter.go:180 +0x1eb\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset.(*request).Finish.func1(0xc004461c20, 0xc0029aab68, 0x40f087)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset/queueset.go:344 +0x65\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset.(*request).Finish(0xc008390e54, 0x9)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset/queueset.go:345 +0x45\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol.(*configController).Handle(0xc006211e60, {0x51e0af8, 0xc005c84720}, "}
{"Time":"2021-10-13T22:46:54.405478072Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestSelfSubjectAccessReview","Output":"ocal/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackCompleted.func1({0x51db438, 0xc005c845a0}, 0xc003ec2700)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:104 +0x1a5\\nnet/http.HandlerFunc.ServeHTTP(0x48bc340, {0x51db438, 0xc005c845a0}, 0xc0058773b8)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1({0x51db438, 0xc005c845a0}, 0xc003ec2700)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/impersonation.go:50 +0x21c\\nnet/http.HandlerFunc.ServeHTTP(0x7c37658, {0x51db438, 0xc005c845a0}, 0xa)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackStarted.func1({0x51db438, 0xc005c845a0}, 0xc003ec2700)\\n\\t/home/prow/go/s"}
{"Time":"2021-10-13T22:46:54.405518214Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestSelfSubjectAccessReview","Output":"rc/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:80 +0x178\\nnet/http.HandlerFunc.ServeHTTP(0xc005c84690, {0x51db438, 0xc005c845a0}, 0x119382a)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackCompleted.func1({0x51db438, 0xc005c845a0}, 0xc003ec2700)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:104 +0x1a5\\nnet/http.HandlerFunc.ServeHTTP(0x7c37658, {0x51db438, 0xc005c845a0}, 0xa)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackStarted.func1({0x51db438, 0xc005c845a0}, 0xc003ec2700)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:80 +0x178\\nnet/http.HandlerFunc.ServeHTTP(0xc005c84690,"}
{"Time":"2021-10-13T22:47:08.977091357Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAuthModeAlwaysAllow","Output":"cal/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:225 +0xcc\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.(*ResponseWriterDelegator).WriteHeader(0x3d56020, 0xc0105d11e0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:616 +0x29\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.(*deferredResponseWriter).Write(0xc010512300, {0xc00e359500, 0xa3, 0x13c2})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:228 +0x59b\\nencoding/json.(*Encoder).Encode(0xc0105fd078, {0x48e9820, 0xc0102959a0})\\n\\t/usr/local/go/src/encoding/json/stream.go:231 +0x1f6\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).doEncode(0x4996872, {0x51c6390, 0xc0102959a0}, {0x50e5240, 0xc010512300})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_outp"}
{"Time":"2021-10-13T22:47:08.977100556Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAuthModeAlwaysAllow","Output":"ut/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json/json.go:327 +0x19a\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).Encode(0xc0003b9630, {0x51c6390, 0xc0102959a0}, {0x50e5240, 0xc010512300})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json/json.go:301 +0xfc\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).doEncode(0xc010295a40, {0x51c6390, 0xc0102959a0}, {0x50e5240, 0xc010512300})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning/versioning.go:228 +0xb62\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).Encode(0xc010295a40, {0x51c6390, 0xc0102959a0}, {0x50e5240, 0xc010512300})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg"}
{"Time":"2021-10-13T22:47:08.977108878Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAuthModeAlwaysAllow","Output":"/runtime/serializer/versioning/versioning.go:184 +0x106\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.SerializeObject({0x49b5458, 0x10}, {0x7f2bcc0da6f8, 0xc010295a40}, {0x51db438, 0xc0105d7a40}, 0xc0105f2100, 0x1f7, {0x51c6390, 0xc0102959a0})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:106 +0x4a9\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.WriteObjectNegotiated({0x51de078, 0xc005422b80}, {0x51de288, 0x7c37658}, {{0x0, 0x1c52a2d}, {0x4996872, 0x51e0af8}}, {0x51db438, 0xc0105d7a40}, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:275 +0x50f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.ErrorNegotiated({0x50e4a60, 0xc010295900}, {0x51de078, 0xc005422b80}, {{0x0, 0x51c6d40}, {0x4996872, 0x46"}
... skipping 6 lines ...
{"Time":"2021-10-13T22:47:11.481035243Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=","Output":"go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning/versioning.go:184 +0x106\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.SerializeObject({0x690bc43, 0x10}, {0x7f2fec2d8bb8, 0xc0169cbd60}, {0x716cd88, 0xc00c8811d0}, 0xc01d7ad600, 0x1f4, {0x7157478, 0xc0169cbcc0})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:106 +0x4a9\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.WriteObjectNegotiated({0x716fc98, 0xc01a37a8c0}, {0x716ff38, 0x9d4e308}, {{0x68ed8d4, 0x6adc748}, {0x68ec11e, 0x0}}, {0x716cd88, 0xc00c8811d0}, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:275 +0x50f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.ErrorNegotiated({0x70763a0, 0xc005172588}, {0x716f"}
{"Time":"2021-10-13T22:47:11.481056436Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=","Output":"netes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.InstrumentRouteFunc.func1(0xc00c881110, 0xc000149a40)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:491 +0x223\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).dispatch(0xc010775b90, {0x716cd88, 0xc00c880db0}, 0xc01d7ad600)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/github.com/emicklei/go-restful/container.go:288 +0x9d1\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).Dispatch(...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/github.com/emicklei/go-restful/container.go:199\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP({{0x6904e76, 0x37e18df}, 0xc010775b90, 0xc0095e88c0}, {0x716cd88, 0xc00c880db0}, 0xc01d7ad600)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/"}
{"Time":"2021-10-13T22:47:11.481073139Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=","Output":"handler.go:146 +0x5bb\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackCompleted.func1({0x716cd88, 0xc00c880db0}, 0xc01d7ad600)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:104 +0x1a5\\nnet/http.HandlerFunc.ServeHTTP(0xc0107216e0, {0x716cd88, 0xc00c880db0}, 0x0)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1({0x716cd88, 0xc00c880db0}, 0xc01d7ad600)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/authorization.go:64 +0x425\\nnet/http.HandlerFunc.ServeHTTP(0x9d4e308, {0x716cd88, 0xc00c880db0}, 0xa)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackStarted.func1({0x716cd88, 0xc00c880db0}, 0xc01d7ad600)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_outp"}
{"Time":"2021-10-13T22:47:11.481086822Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=","Output":"ut/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:80 +0x178\\nnet/http.HandlerFunc.ServeHTTP(0xc00c880e40, {0x716cd88, 0xc00c880db0}, 0x29ec62a)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackCompleted.func1({0x716cd88, 0xc00c880db0}, 0xc01d7ad600)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:104 +0x1a5\\nnet/http.HandlerFunc.ServeHTTP(0xc008db2800, {0x716cd88, 0xc00c880db0}, 0xc008305ec0)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithPriorityAndFairness.func1.8()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/priority-and-fairness.go:269 +0x118\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol.(*configController).Handle.fun"}
{"Time":"2021-10-13T22:47:11.481095663Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=","Output":"c2()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/apf_filter.go:180 +0x1eb\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset.(*request).Finish.func1(0xc01027a690, 0xc01771cb68, 0x2169ac7)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset/queueset.go:344 +0x65\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset.(*request).Finish(0xc01dd84164, 0x9)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset/queueset.go:345 +0x45\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol.(*configController).Handle(0xc005c19320, {0x7172a18, 0xc00c880ea0}, {0xc00a331810, {0x7173af0, 0xc008db2700}, {0x1, 0x0, 0x0}}, 0xc0056eece0, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/sr"}
{"Time":"2021-10-13T22:47:11.481103636Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=","Output":"c/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/apf_filter.go:170 +0x8d3\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithPriorityAndFairness.func1({0x716cd88, 0xc00c880db0}, 0xc01d7ad600)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/priority-and-fairness.go:272 +0xdfd\\nnet/http.HandlerFunc.ServeHTTP(0x9d4e308, {0x716cd88, 0xc00c880db0}, 0xa)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackStarted.func1({0x716cd88, 0xc00c880db0}, 0xc01d7ad600)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:80 +0x178\\nnet/http.HandlerFunc.ServeHTTP(0xc00c880e40, {0x716cd88, 0xc00c880db0}, 0x29ec62a)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackCompleted.func1({0x"}
{"Time":"2021-10-13T22:47:11.481140902Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=","Output":"ints/filterlatency/filterlatency.go:89 +0x46b\\nnet/http.HandlerFunc.ServeHTTP(0x227db80, {0x716cd88, 0xc00c880db0}, 0xc012816790)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:110 +0x70\\ncreated by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:96 +0x1bd\\n\" addedInfo=\"\\nlogging error output: \\\"{\\\\\\\"kind\\\\\\\":\\\\\\\"Status\\\\\\\",\\\\\\\"apiVersion\\\\\\\":\\\\\\\"v1\\\\\\\",\\\\\\\"metadata\\\\\\\":{},\\\\\\\"status\\\\\\\":\\\\\\\"Failure\\\\\\\",\\\\\\\"message\\\\\\\":\\\\\\\"resourceVersion: Invalid value: \\\\\\\\\\\\\\\"invalid\\\\\\\\\\\\\\\": strconv.ParseUint: parsing \\\\\\\\\\\\\\\"invalid\\\\\\\\\\\\\\\": invalid syntax\\\\\\\",\\\\\\\"code\\\\\\\":500}\\\\n\\\"\\n\"\n"}
{"Time":"2021-10-13T22:47:11.482762198Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=NotOlderThan","Output":"outWriter).WriteHeader(0x10, 0x9d1dde0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:225 +0xcc\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.(*ResponseWriterDelegator).WriteHeader(0x5c6f7e0, 0xc0163534a0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:616 +0x29\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.(*deferredResponseWriter).Write(0xc00a35bc20, {0xc0072be000, 0xc0, 0x4a72})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:228 +0x59b\\nencoding/json.(*Encoder).Encode(0xc01771b030, {0x683cd60, 0xc0169eabe0})\\n\\t/usr/local/go/src/encoding/json/stream.go:231 +0x1f6\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).doEncode(0x68ec11e, {0x715747"}
{"Time":"2021-10-13T22:47:11.482780758Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=NotOlderThan","Output":"c/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning/versioning.go:184 +0x106\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.SerializeObject({0x690bc43, 0x10}, {0x7f2fec2d8bb8, 0xc0169eadc0}, {0x716cd88, 0xc00c90e210}, 0xc01d7d0c00, 0x1f4, {0x7157478, 0xc0169eabe0})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:106 +0x4a9\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.WriteObjectNegotiated({0x716fc98, 0xc01a37a8c0}, {0x716ff38, 0x9d4e308}, {{0x68ed8d4, 0x6adc748}, {0x68ec11e, 0x0}}, {0x716cd88, 0xc00c90e210}, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:275 +0x50f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.ErrorNegotiated("}
{"Time":"2021-10-13T22:47:11.48279734Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=NotOlderThan","Output":"taller.go:1174 +0x6b\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.InstrumentRouteFunc.func1(0xc00c90e150, 0xc000bfc150)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:491 +0x223\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).dispatch(0xc010775b90, {0x716cd88, 0xc00c881ce0}, 0xc01d7d0c00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/github.com/emicklei/go-restful/container.go:288 +0x9d1\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).Dispatch(...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/github.com/emicklei/go-restful/container.go:199\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP({{0x6904e76, 0x37e18df}, 0xc010775b90, 0xc0095e88c0}, {0x716cd88, 0xc00c881ce0}, 0xc01d7d0c00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/v"}
{"Time":"2021-10-13T22:47:11.48281584Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=NotOlderThan","Output":"endor/k8s.io/apiserver/pkg/server/handler.go:146 +0x5bb\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackCompleted.func1({0x716cd88, 0xc00c881ce0}, 0xc01d7d0c00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:104 +0x1a5\\nnet/http.HandlerFunc.ServeHTTP(0xc0107216e0, {0x716cd88, 0xc00c881ce0}, 0x0)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1({0x716cd88, 0xc00c881ce0}, 0xc01d7d0c00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/authorization.go:64 +0x425\\nnet/http.HandlerFunc.ServeHTTP(0x9d4e308, {0x716cd88, 0xc00c881ce0}, 0xa)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackStarted.func1({0x716cd88, 0xc00c881ce0}, 0xc01d7d0c00)\\n\\t/home/p"}
{"Time":"2021-10-13T22:47:11.482823452Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=NotOlderThan","Output":"row/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:80 +0x178\\nnet/http.HandlerFunc.ServeHTTP(0xc00c881e60, {0x716cd88, 0xc00c881ce0}, 0x29ec62a)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackCompleted.func1({0x716cd88, 0xc00c881ce0}, 0xc01d7d0c00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:104 +0x1a5\\nnet/http.HandlerFunc.ServeHTTP(0xc008db2c40, {0x716cd88, 0xc00c881ce0}, 0xc008305ec0)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithPriorityAndFairness.func1.8()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/priority-and-fairness.go:269 +0x118\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcont"}
{"Time":"2021-10-13T22:47:11.482831072Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=NotOlderThan","Output":"rol.(*configController).Handle.func2()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/apf_filter.go:180 +0x1eb\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset.(*request).Finish.func1(0xc01027a780, 0xc01771cb68, 0x2169ac7)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset/queueset.go:344 +0x65\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset.(*request).Finish(0xc01dd84164, 0x9)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset/queueset.go:345 +0x45\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol.(*configController).Handle(0xc005c19320, {0x7172a18, 0xc00c881ec0}, {0xc00a331a20, {0x7173af0, 0xc008db2b40}, {0x1, 0x0, 0x0}}, 0xc0056ef080, ...)\\n\\t/home/prow/go/src/k8s"}
{"Time":"2021-10-13T22:47:11.482838743Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=NotOlderThan","Output":".io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/apf_filter.go:170 +0x8d3\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithPriorityAndFairness.func1({0x716cd88, 0xc00c881ce0}, 0xc01d7d0c00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/priority-and-fairness.go:272 +0xdfd\\nnet/http.HandlerFunc.ServeHTTP(0x9d4e308, {0x716cd88, 0xc00c881ce0}, 0xa)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackStarted.func1({0x716cd88, 0xc00c881ce0}, 0xc01d7d0c00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:80 +0x178\\nnet/http.HandlerFunc.ServeHTTP(0xc00c881e60, {0x716cd88, 0xc00c881ce0}, 0x29ec62a)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filt"}
{"Time":"2021-10-13T22:47:11.482853952Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=NotOlderThan","Output":"filterlatency.go:80 +0x178\\nnet/http.HandlerFunc.ServeHTTP(0xc00c881e60, {0x716cd88, 0xc00c881ce0}, 0x29ec62a)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackCompleted.func1({0x716cd88, 0xc00c881ce0}, 0xc01d7d0c00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:104 +0x1a5\\nnet/http.HandlerFunc.ServeHTTP(0x9d4e308, {0x716cd88, 0xc00c881ce0}, 0xa)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackStarted.func1({0x716cd88, 0xc00c881ce0}, 0xc01d7d0c00)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:80 +0x178\\nnet/http.HandlerFunc.ServeHTTP(0xc00c881e60, {0x716cd88, 0xc00c881ce0}, 0x29ec62a)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/ven"}
{"Time":"2021-10-13T22:47:11.482873189Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver","Test":"TestListOptions/watchCacheEnabled=true/limit=0_continue=empty_rv=invalid_rvMatch=NotOlderThan","Output":"/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:89 +0x46b\\nnet/http.HandlerFunc.ServeHTTP(0x227db80, {0x716cd88, 0xc00c881ce0}, 0xc012816790)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:110 +0x70\\ncreated by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:96 +0x1bd\\n\" addedInfo=\"\\nlogging error output: \\\"{\\\\\\\"kind\\\\\\\":\\\\\\\"Status\\\\\\\",\\\\\\\"apiVersion\\\\\\\":\\\\\\\"v1\\\\\\\",\\\\\\\"metadata\\\\\\\":{},\\\\\\\"status\\\\\\\":\\\\\\\"Failure\\\\\\\",\\\\\\\"message\\\\\\\":\\\\\\\"resourceVersion: Invalid value: \\\\\\\\\\\\\\\"invalid\\\\\\\\\\\\\\\": strconv.ParseUint: parsing \\\\\\\\\\\\\\\"invalid\\\\\\\\\\\\\\\": invalid syntax\\\\\\\",\\\\\\\"code\\\\\\\":500}\\\\n\\\"\\n\"\n"}
{"Time":"2021-10-13T22:47:13.669637886Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apimachinery","Output":"ok  \tk8s.io/kubernetes/test/integration/apimachinery\t69.564s\n"}
{"Time":"2021-10-13T22:47:27.797439201Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAliceNotForbiddenOrUnauthorized","Output":"io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:225 +0xcc\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.(*ResponseWriterDelegator).WriteHeader(0x3d56020, 0xc00b939b80)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:616 +0x29\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.(*deferredResponseWriter).Write(0xc0084f1da0, {0xc00039b800, 0xa3, 0x46d0})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:228 +0x59b\\nencoding/json.(*Encoder).Encode(0xc00a2c3078, {0x48e9820, 0xc005540f00})\\n\\t/usr/local/go/src/encoding/json/stream.go:231 +0x1f6\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).doEncode(0x4996872, {0x51c6390, 0xc005540f00}, {0x50e5240, 0xc0084f1da0})\\n\\t/home/prow/go/src"}
{"Time":"2021-10-13T22:47:27.797448242Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAliceNotForbiddenOrUnauthorized","Output":"/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json/json.go:327 +0x19a\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).Encode(0xc0003b9630, {0x51c6390, 0xc005540f00}, {0x50e5240, 0xc0084f1da0})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json/json.go:301 +0xfc\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).doEncode(0xc005541040, {0x51c6390, 0xc005540f00}, {0x50e5240, 0xc0084f1da0})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning/versioning.go:228 +0xb62\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).Encode(0xc005541040, {0x51c6390, 0xc005540f00}, {0x50e5240, 0xc0084f1da0})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor"}
{"Time":"2021-10-13T22:47:27.797458311Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAliceNotForbiddenOrUnauthorized","Output":"/k8s.io/apimachinery/pkg/runtime/serializer/versioning/versioning.go:184 +0x106\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.SerializeObject({0x49b5458, 0x10}, {0x7f2bcc0da6f8, 0xc005541040}, {0x51db438, 0xc005d260f0}, 0xc002abc600, 0x1f7, {0x51c6390, 0xc005540f00})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:106 +0x4a9\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.WriteObjectNegotiated({0x51de078, 0xc00fcbe200}, {0x51de288, 0x7c37658}, {{0x0, 0x1c52a2d}, {0x4996872, 0x51e0af8}}, {0x51db438, 0xc005d260f0}, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:275 +0x50f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.ErrorNegotiated({0x50e4a60, 0xc005540d20}, {0x51de078, 0xc00fcbe200}, {{0x0, 0x51"}
{"Time":"2021-10-13T22:47:27.797507022Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAliceNotForbiddenOrUnauthorized","Output":"4 +0x1a5\\nnet/http.HandlerFunc.ServeHTTP(0xc004d8c040, {0x51db438, 0xc0057a1c50}, 0xc0061bb500)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithPriorityAndFairness.func1.8()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/priority-and-fairness.go:269 +0x118\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol.(*configController).Handle.func2()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/apf_filter.go:180 +0x1eb\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset.(*request).Finish.func1(0xc001a12870, 0xc013ba4b68, 0x40f087)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset/queueset.go:344 +0x65\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/"}
{"Time":"2021-10-13T22:47:27.797514034Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestAliceNotForbiddenOrUnauthorized","Output":"fairqueuing/queueset.(*request).Finish(0xc015c8fc10, 0xe)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset/queueset.go:345 +0x45\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol.(*configController).Handle(0xc0100087e0, {0x51e0af8, 0xc0057a1dd0}, {0xc000fd96b0, {0x51e1658, 0xc003c1bf80}, {0x1, 0x0, 0x0}}, 0xc00011e7c0, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/apf_filter.go:170 +0x8d3\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithPriorityAndFairness.func1({0x51db438, 0xc0057a1c50}, 0xc002abc600)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/priority-and-fairness.go:272 +0xdfd\\nnet/http.HandlerFunc.ServeHTTP(0x7c37658, {0x51db438, 0xc0057a1c50}, 0xa)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor"}
... skipping 5 lines ...
{"Time":"2021-10-13T22:47:52.533474008Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"endor/k8s.io/apimachinery/pkg/runtime/serializer/versioning/versioning.go:184 +0x106\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.SerializeObject({0x49b5458, 0x10}, {0x7f2bcc0da6f8, 0xc01de0c0a0}, {0x51db438, 0xc015cafb30}, 0xc00b69d800, 0x1f4, {0x51c6390, 0xc01de0c000})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:106 +0x4a9\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.WriteObjectNegotiated({0x51de078, 0xc01e5f9180}, {0x51de288, 0x7c37658}, {{0x0, 0xc00ff54ba0}, {0x4996872, 0xc01c789f40}}, {0x51db438, 0xc015cafb30}, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:275 +0x50f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.ErrorNegotiated({0x50dd780, 0xc00f8bac90}, {0x51de078, 0xc01e5f9180}, "}
{"Time":"2021-10-13T22:47:52.533494462Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"piserver/pkg/endpoints/metrics.InstrumentRouteFunc.func1(0xc015cafa70, 0xc01550de30)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:491 +0x223\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).dispatch(0xc01e5fb4d0, {0x51db438, 0xc015caf650}, 0xc00b69d800)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/github.com/emicklei/go-restful/container.go:288 +0x9d1\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).Dispatch(...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/github.com/emicklei/go-restful/container.go:199\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP({{0x49ae9d8, 0x16f345f}, 0xc01e5fb4d0, 0xc01e2779d0}, {0x51db438, 0xc015caf650}, 0xc00b69d800)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/handler.go:146 +0x5bb"}
{"Time":"2021-10-13T22:47:52.533521521Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"o/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:80 +0x178\\nnet/http.HandlerFunc.ServeHTTP(0xc01120a878, {0x51db438, 0xc015caf650}, 0x119382a)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackCompleted.func1({0x51db438, 0xc015caf650}, 0xc00b69d800)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:104 +0x1a5\\nnet/http.HandlerFunc.ServeHTTP(0xc00f64eec0, {0x51db438, 0xc015caf650}, 0xc0061bb500)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithPriorityAndFairness.func1.8()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/priority-and-fairness.go:269 +0x118\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol.(*configController).Handle.func2()\\n\\t/home/prow/go"}
{"Time":"2021-10-13T22:47:52.533530948Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/apf_filter.go:180 +0x1eb\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset.(*request).Finish.func1(0xc00f832960, 0xc01120ab68, 0x40f087)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset/queueset.go:344 +0x65\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset.(*request).Finish(0xc00433c370, 0xe)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset/queueset.go:345 +0x45\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol.(*configController).Handle(0xc01e3efb00, {0x51e0af8, 0xc015caf830}, {0xc0169e46e0, {0x51e1658, 0xc00f64edc0}, {0x1, 0x0, 0x0}}, 0xc00e67f380, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/ve"}
{"Time":"2021-10-13T22:47:52.533539734Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"ndor/k8s.io/apiserver/pkg/util/flowcontrol/apf_filter.go:170 +0x8d3\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithPriorityAndFairness.func1({0x51db438, 0xc015caf650}, 0xc00b69d800)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/priority-and-fairness.go:272 +0xdfd\\nnet/http.HandlerFunc.ServeHTTP(0x7c37658, {0x51db438, 0xc015caf650}, 0xa)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackStarted.func1({0x51db438, 0xc015caf650}, 0xc00b69d800)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:80 +0x178\\nnet/http.HandlerFunc.ServeHTTP(0xc015d4b088, {0x51db438, 0xc015caf650}, 0x119382a)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackCompleted.func1({0x51db438, 0xc015caf650}"}
{"Time":"2021-10-13T22:47:52.533548636Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":", 0xc00b69d800)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:104 +0x1a5\\nnet/http.HandlerFunc.ServeHTTP(0xc015d4b6d0, {0x51db438, 0xc015caf650}, 0xf)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithImpersonation.func1({0x51db438, 0xc015caf650}, 0xc00b69d700)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/impersonation.go:182 +0x1f3f\\nnet/http.HandlerFunc.ServeHTTP(0x7c37658, {0x51db438, 0xc015caf650}, 0xa)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackStarted.func1({0x51db438, 0xc015caf650}, 0xc00b69d700)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:80 +0x178\\nnet/http.HandlerFunc.Se"}
{"Time":"2021-10-13T22:47:52.533593889Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"y/filterlatency.go:89 +0x46b\\nnet/http.HandlerFunc.ServeHTTP(0xc01c5ed500, {0x51db438, 0xc015caf650}, 0xc008e24fd0)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:110 +0x70\\ncreated by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:96 +0x1bd\\n\" addedInfo=\"\\n\u0026{bob 2 [system:authenticated] map[]} is acting as \u0026{alice  [system:authenticated] map[]}\\nlogging error output: \\\"{\\\\\\\"kind\\\\\\\":\\\\\\\"Status\\\\\\\",\\\\\\\"apiVersion\\\\\\\":\\\\\\\"v1\\\\\\\",\\\\\\\"metadata\\\\\\\":{},\\\\\\\"status\\\\\\\":\\\\\\\"Failure\\\\\\\",\\\\\\\"message\\\\\\\":\\\\\\\"couldn't get version/kind; json parse error: invalid character '%' after object key:value pair\\\\\\\",\\\\\\\"code\\\\\\\":5"}
{"Time":"2021-10-13T22:47:53.46783585Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"xc000500000)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:225 +0xcc\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.(*ResponseWriterDelegator).WriteHeader(0x3d56020, 0xc01c6d5340)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:616 +0x29\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.(*deferredResponseWriter).Write(0xc01b84ca20, {0xc00039b800, 0xbb, 0x46d0})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:228 +0x59b\\nencoding/json.(*Encoder).Encode(0xc020b8cdd0, {0x48e9820, 0xc01f478000})\\n\\t/usr/local/go/src/encoding/json/stream.go:231 +0x1f6\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).doEncode(0x4996872, {0x51c6390, 0xc01f478000}, {0x50e524"}
{"Time":"2021-10-13T22:47:53.467844972Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"0, 0xc01b84ca20})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json/json.go:327 +0x19a\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).Encode(0xc0003b9630, {0x51c6390, 0xc01f478000}, {0x50e5240, 0xc01b84ca20})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json/json.go:301 +0xfc\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).doEncode(0xc01f4780a0, {0x51c6390, 0xc01f478000}, {0x50e5240, 0xc01b84ca20})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning/versioning.go:228 +0xb62\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).Encode(0xc01f4780a0, {0x51c6390, 0xc01f478000}, {0x50e5240, 0xc01b84ca20})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output"}
{"Time":"2021-10-13T22:47:53.467852876Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning/versioning.go:184 +0x106\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.SerializeObject({0x49b5458, 0x10}, {0x7f2bcc0da6f8, 0xc01f4780a0}, {0x51db438, 0xc01624e7b0}, 0xc00c302200, 0x1f4, {0x51c6390, 0xc01f478000})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:106 +0x4a9\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.WriteObjectNegotiated({0x51de078, 0xc01e5f9180}, {0x51de288, 0x7c37658}, {{0x0, 0xc01b84c900}, {0x4996872, 0xc01f42dea0}}, {0x51db438, 0xc01624e7b0}, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:275 +0x50f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.ErrorNegotiated({0x50dd780, 0xc00ecbc"}
{"Time":"2021-10-13T22:47:53.467870249Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.InstrumentRouteFunc.func1(0xc01624e6f0, 0xc01599b3b0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:491 +0x223\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).dispatch(0xc01e5fb4d0, {0x51db438, 0xc01624e2d0}, 0xc00c302200)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/github.com/emicklei/go-restful/container.go:288 +0x9d1\\nk8s.io/kubernetes/vendor/github.com/emicklei/go-restful.(*Container).Dispatch(...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/github.com/emicklei/go-restful/container.go:199\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server.director.ServeHTTP({{0x49ae9d8, 0x16f345f}, 0xc01e5fb4d0, 0xc01e2779d0}, {0x51db438, 0xc01624e2d0}, 0xc00c302200)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver"}
{"Time":"2021-10-13T22:47:53.467884821Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"/pkg/server/handler.go:146 +0x5bb\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackCompleted.func1({0x51db438, 0xc01624e2d0}, 0xc00c302200)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:104 +0x1a5\\nnet/http.HandlerFunc.ServeHTTP(0xc01e3d1968, {0x51db438, 0xc01624e2d0}, 0x0)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters.WithAuthorization.func1({0x51db438, 0xc01624e2d0}, 0xc00c302200)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filters/authorization.go:64 +0x425\\nnet/http.HandlerFunc.ServeHTTP(0x7c37658, {0x51db438, 0xc01624e2d0}, 0xa)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackStarted.func1({0x51db438, 0xc01624e2d0}, 0xc00c302200)\\n\\t/home/prow/go/src/k8s.io/kube"}
{"Time":"2021-10-13T22:47:53.467892553Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"rnetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:80 +0x178\\nnet/http.HandlerFunc.ServeHTTP(0xc020b8e878, {0x51db438, 0xc01624e2d0}, 0x119382a)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackCompleted.func1({0x51db438, 0xc01624e2d0}, 0xc00c302200)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:104 +0x1a5\\nnet/http.HandlerFunc.ServeHTTP(0xc00fadb600, {0x51db438, 0xc01624e2d0}, 0xc016b547e0)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithPriorityAndFairness.func1.8()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/priority-and-fairness.go:269 +0x118\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol.(*configController"}
{"Time":"2021-10-13T22:47:53.467899826Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":").Handle.func2()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/apf_filter.go:180 +0x1eb\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset.(*request).Finish.func1(0xc016d6e2d0, 0xc020b8eb68, 0x40f087)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset/queueset.go:344 +0x65\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset.(*request).Finish(0xc00117d060, 0xc)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/fairqueuing/queueset/queueset.go:345 +0x45\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol.(*configController).Handle(0xc01e3efb00, {0x51e0af8, 0xc01624e4b0}, {0xc01e39e000, {0x51e1658, 0xc00fadb500}, {0x1, 0x0, 0x0}}, 0xc00ef61460, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/"}
{"Time":"2021-10-13T22:47:53.467906803Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/util/flowcontrol/apf_filter.go:170 +0x8d3\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.WithPriorityAndFairness.func1({0x51db438, 0xc01624e2d0}, 0xc00c302200)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/priority-and-fairness.go:272 +0xdfd\\nnet/http.HandlerFunc.ServeHTTP(0x7c37658, {0x51db438, 0xc01624e2d0}, 0xa)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackStarted.func1({0x51db438, 0xc01624e2d0}, 0xc00c302200)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:80 +0x178\\nnet/http.HandlerFunc.ServeHTTP(0xc0204ef088, {0x51db438, 0xc01624e2d0}, 0x119382a)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackComplete"}
{"Time":"2021-10-13T22:47:53.467922971Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"0 +0x178\\nnet/http.HandlerFunc.ServeHTTP(0xc01624e360, {0x51db438, 0xc01624e2d0}, 0x119382a)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackCompleted.func1({0x51db438, 0xc01624e2d0}, 0xc00c302100)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:104 +0x1a5\\nnet/http.HandlerFunc.ServeHTTP(0x7c37658, {0x51db438, 0xc01624e2d0}, 0xa)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency.trackStarted.func1({0x51db438, 0xc01624e2d0}, 0xc00c302100)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/filterlatency/filterlatency.go:80 +0x178\\nnet/http.HandlerFunc.ServeHTTP(0xc01624e360, {0x51db438, 0xc01624e2d0}, 0x119382a)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserv"}
{"Time":"2021-10-13T22:47:53.467941398Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/auth","Test":"TestImpersonateIsForbidden","Output":"server/pkg/endpoints/filterlatency/filterlatency.go:89 +0x46b\\nnet/http.HandlerFunc.ServeHTTP(0xc00c2662a0, {0x51db438, 0xc01624e2d0}, 0xc00edaaf78)\\n\\t/usr/local/go/src/net/http/server.go:2046 +0x2f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP.func1()\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:110 +0x70\\ncreated by k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters.(*timeoutHandler).ServeHTTP\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:96 +0x1bd\\n\" addedInfo=\"\\n\u0026{bob 2 [system:authenticated] map[]} is acting as \u0026{system:serviceaccount:default:default  [system:serviceaccounts system:serviceaccounts:default system:authenticated] map[]}\\nlogging error output: \\\"{\\\\\\\"kind\\\\\\\":\\\\\\\"Status\\\\\\\",\\\\\\\"apiVersion\\\\\\\":\\\\\\\"v1\\\\\\\",\\\\\\\"metadata\\\\\\\":{},\\\\\\\"status\\\\\\\":\\\\\\\"Failure\\\\\\\",\\\\\\\"message\\\\\\"}
{"Time":"2021-10-13T22:47:54.499691627Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver/apply","Test":"TestCreateVeryLargeObject","Output":"0, 0x7bdf0e0)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/server/filters/timeout.go:225 +0xcc\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics.(*ResponseWriterDelegator).WriteHeader(0x3d496e0, 0xc02cf83080)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/metrics/metrics.go:616 +0x29\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.(*deferredResponseWriter).Write(0xc01c7c13e0, {0xc04394c000, 0x7d, 0x648c})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:228 +0x59b\\nencoding/json.(*Encoder).Encode(0xc036dbc9c8, {0x48d9fa0, 0xc00a1b23c0})\\n\\t/usr/local/go/src/encoding/json/stream.go:231 +0x1f6\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).doEncode(0x4986ffa, {0x51b96f0, 0xc00a1b23c0}, {0x50d89"}
{"Time":"2021-10-13T22:47:54.499708865Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver/apply","Test":"TestCreateVeryLargeObject","Output":"a0, 0xc01c7c13e0})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json/json.go:327 +0x19a\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json.(*Serializer).Encode(0xc000324320, {0x51b96f0, 0xc00a1b23c0}, {0x50d89a0, 0xc01c7c13e0})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/json/json.go:301 +0xfc\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).doEncode(0xc00a1b2460, {0x51b96f0, 0xc00a1b23c0}, {0x50d89a0, 0xc01c7c13e0})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning/versioning.go:228 +0xb62\\nk8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning.(*codec).Encode(0xc00a1b2460, {0x51b96f0, 0xc00a1b23c0}, {0x50d89a0, 0xc01c7c13e0})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_outpu"}
{"Time":"2021-10-13T22:47:54.499725731Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver/apply","Test":"TestCreateVeryLargeObject","Output":"t/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/runtime/serializer/versioning/versioning.go:184 +0x106\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.SerializeObject({0x49a5c14, 0x10}, {0x7fd7e7718d00, 0xc00a1b2460}, {0x51ce778, 0xc00a4ccba0}, 0xc025f1ca00, 0x1f4, {0x51b96f0, 0xc00a1b23c0})\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:106 +0x4a9\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.WriteObjectNegotiated({0x51d13b8, 0xc02b556580}, {0x51d15c8, 0x7c0f558}, {{0x0, 0x0}, {0x4986ffa, 0xc0159bf3a0}}, {0x51ce778, 0xc00a4ccba0}, ...)\\n\\t/home/prow/go/src/k8s.io/kubernetes/_output/local/go/src/k8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters/writers.go:275 +0x50f\\nk8s.io/kubernetes/vendor/k8s.io/apiserver/pkg/endpoints/handlers/responsewriters.ErrorNegotiated({0x50dc020, 0xc01e5e0f78}, {0"}
{"Time":"2021-10-13T22:47:54.499748008Z","Action":"output","Package":"k8s.io/kubernetes/test/integration/apiserver/apply","Test":"TestCreateVeryLargeObject","Output&