PR: msau42: Automated cherry pick of #71804: Move unmount volume util from pkg/volume/util to
Result: FAILURE
Tests: 1 failed / 333 succeeded
Started: 2019-01-12 00:55
Elapsed: 25m34s
Builder: gke-prow-containerd-pool-99179761-nfzr
Refs: release-1.11:b9e3e545, 72601:42a7310d
pod: ba8f10f3-1604-11e9-b59d-0a580a6c0288
infra-commit: fd3539600
repo: k8s.io/kubernetes
repo-commit: a01f7e72b2c47af60d054066920fb9af6269aab1
repos: {u'k8s.io/kubernetes': u'release-1.11:b9e3e545197e98486524492058c0e84926a2a4fb,72601:42a7310d52857be6608ab99c30d3d8f4d421909a'}
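To inspect the exact code under test, the refs above can be recreated locally (a sketch, assuming a clone of k8s.io/kubernetes with GitHub as the origin remote; prow builds the PR head merged onto the branch commit):

git fetch origin release-1.11 pull/72601/head
git checkout b9e3e545197e98486524492058c0e84926a2a4fb
git merge 42a7310d52857be6608ab99c30d3d8f4d421909a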

Test Failures


k8s.io/kubernetes/test/integration/scheduler TestPreemptionRaces 13s

go test -v k8s.io/kubernetes/test/integration/scheduler -run TestPreemptionRaces$
I0112 01:16:16.995585  121941 services.go:33] Network range for service cluster IPs is unspecified. Defaulting to {10.0.0.0 ffffff00}.
I0112 01:16:16.995695  121941 master.go:278] Node port range unspecified. Defaulting to 30000-32767.
I0112 01:16:16.995723  121941 master.go:234] Using reconciler: 
W0112 01:16:17.255029  121941 genericapiserver.go:319] Skipping API batch/v2alpha1 because it has no resources.
W0112 01:16:17.268317  121941 genericapiserver.go:319] Skipping API rbac.authorization.k8s.io/v1alpha1 because it has no resources.
W0112 01:16:17.269012  121941 genericapiserver.go:319] Skipping API scheduling.k8s.io/v1alpha1 because it has no resources.
W0112 01:16:17.271826  121941 genericapiserver.go:319] Skipping API storage.k8s.io/v1alpha1 because it has no resources.
W0112 01:16:17.289789  121941 genericapiserver.go:319] Skipping API admissionregistration.k8s.io/v1alpha1 because it has no resources.
E0112 01:16:17.888853  121941 controller.go:192] unable to sync kubernetes service: Post http://127.0.0.1:34471/api/v1/namespaces: dial tcp 127.0.0.1:34471: connect: connection refused
I0112 01:16:18.298146  121941 storage_scheduling.go:91] created PriorityClass system-node-critical with value 2000001000
I0112 01:16:18.301928  121941 storage_scheduling.go:91] created PriorityClass system-cluster-critical with value 2000000000
I0112 01:16:18.301952  121941 storage_scheduling.go:100] all system priority classes are created successfully or already exist.
I0112 01:16:18.310536  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/cluster-admin
I0112 01:16:18.314198  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:discovery
I0112 01:16:18.317481  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:basic-user
I0112 01:16:18.320698  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/admin
I0112 01:16:18.323558  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/edit
I0112 01:16:18.326424  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/view
I0112 01:16:18.332374  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:aggregate-to-admin
I0112 01:16:18.338737  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:aggregate-to-edit
I0112 01:16:18.347369  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:aggregate-to-view
I0112 01:16:18.351458  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:heapster
I0112 01:16:18.354935  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:node
I0112 01:16:18.358525  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:node-problem-detector
I0112 01:16:18.361908  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:node-proxier
I0112 01:16:18.365193  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:kubelet-api-admin
I0112 01:16:18.368975  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:node-bootstrapper
I0112 01:16:18.372161  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:auth-delegator
I0112 01:16:18.374982  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:kube-aggregator
I0112 01:16:18.377938  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:kube-controller-manager
I0112 01:16:18.386531  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:kube-scheduler
I0112 01:16:18.389864  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:kube-dns
I0112 01:16:18.393747  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:persistent-volume-provisioner
I0112 01:16:18.397187  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:csi-external-provisioner
I0112 01:16:18.400130  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:csi-external-attacher
I0112 01:16:18.403402  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:aws-cloud-provider
I0112 01:16:18.405856  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:certificates.k8s.io:certificatesigningrequests:nodeclient
I0112 01:16:18.408663  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient
I0112 01:16:18.411735  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:volume-scheduler
I0112 01:16:18.414837  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:attachdetach-controller
I0112 01:16:18.418770  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:clusterrole-aggregation-controller
I0112 01:16:18.421612  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:cronjob-controller
I0112 01:16:18.424876  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:daemon-set-controller
I0112 01:16:18.427917  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:deployment-controller
I0112 01:16:18.431035  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:disruption-controller
I0112 01:16:18.436696  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:endpoint-controller
I0112 01:16:18.440570  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:expand-controller
I0112 01:16:18.443437  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:generic-garbage-collector
I0112 01:16:18.446241  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:horizontal-pod-autoscaler
I0112 01:16:18.449361  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:job-controller
I0112 01:16:18.452896  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:namespace-controller
I0112 01:16:18.455952  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:node-controller
I0112 01:16:18.461165  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:persistent-volume-binder
I0112 01:16:18.464428  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:pod-garbage-collector
I0112 01:16:18.467335  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:replicaset-controller
I0112 01:16:18.470484  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:replication-controller
I0112 01:16:18.473680  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:resourcequota-controller
I0112 01:16:18.477030  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:route-controller
I0112 01:16:18.480071  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:service-account-controller
I0112 01:16:18.482928  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:service-controller
I0112 01:16:18.488362  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:statefulset-controller
I0112 01:16:18.495245  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:ttl-controller
I0112 01:16:18.499149  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:certificate-controller
I0112 01:16:18.538517  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:pvc-protection-controller
I0112 01:16:18.578265  121941 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:pv-protection-controller
I0112 01:16:18.621449  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/cluster-admin
I0112 01:16:18.660019  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:discovery
I0112 01:16:18.701636  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:basic-user
I0112 01:16:18.742811  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:node-proxier
I0112 01:16:18.779195  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:kube-controller-manager
I0112 01:16:18.818684  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:kube-dns
I0112 01:16:18.861182  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:kube-scheduler
I0112 01:16:18.898780  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:aws-cloud-provider
I0112 01:16:18.942032  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:node
I0112 01:16:18.978924  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:volume-scheduler
I0112 01:16:19.024082  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:attachdetach-controller
I0112 01:16:19.059446  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:clusterrole-aggregation-controller
I0112 01:16:19.098160  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:cronjob-controller
I0112 01:16:19.180322  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:daemon-set-controller
I0112 01:16:19.184263  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:deployment-controller
I0112 01:16:19.218892  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:disruption-controller
I0112 01:16:19.258516  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:endpoint-controller
I0112 01:16:19.298877  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:expand-controller
I0112 01:16:19.339377  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:generic-garbage-collector
I0112 01:16:19.379576  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:horizontal-pod-autoscaler
I0112 01:16:19.419592  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:job-controller
I0112 01:16:19.460348  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:namespace-controller
I0112 01:16:19.500609  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:node-controller
I0112 01:16:19.541108  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:persistent-volume-binder
I0112 01:16:19.578485  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:pod-garbage-collector
I0112 01:16:19.619015  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:replicaset-controller
I0112 01:16:19.660429  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:replication-controller
I0112 01:16:19.698522  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:resourcequota-controller
I0112 01:16:19.765636  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:route-controller
I0112 01:16:19.787872  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:service-account-controller
I0112 01:16:19.831302  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:service-controller
I0112 01:16:19.858512  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:statefulset-controller
I0112 01:16:19.898685  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:ttl-controller
I0112 01:16:19.938570  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:certificate-controller
I0112 01:16:19.978247  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:pvc-protection-controller
I0112 01:16:20.019183  121941 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:pv-protection-controller
I0112 01:16:20.058652  121941 storage_rbac.go:246] created role.rbac.authorization.k8s.io/extension-apiserver-authentication-reader in kube-system
I0112 01:16:20.098635  121941 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-system
I0112 01:16:20.139191  121941 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system:controller:cloud-provider in kube-system
I0112 01:16:20.178686  121941 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system:controller:token-cleaner in kube-system
I0112 01:16:20.219697  121941 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system::leader-locking-kube-controller-manager in kube-system
I0112 01:16:20.258905  121941 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system::leader-locking-kube-scheduler in kube-system
I0112 01:16:20.298810  121941 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-public
I0112 01:16:20.339027  121941 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system::leader-locking-kube-controller-manager in kube-system
I0112 01:16:20.378657  121941 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system::leader-locking-kube-scheduler in kube-system
I0112 01:16:20.420680  121941 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-system
I0112 01:16:20.468411  121941 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system:controller:cloud-provider in kube-system
I0112 01:16:20.501092  121941 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system:controller:token-cleaner in kube-system
I0112 01:16:20.540111  121941 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-public
W0112 01:16:20.595632  121941 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0112 01:16:20.595841  121941 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0112 01:16:20.595920  121941 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0112 01:16:20.595977  121941 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0112 01:16:20.596006  121941 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0112 01:16:20.596025  121941 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0112 01:16:20.596075  121941 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0112 01:16:20.596106  121941 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0112 01:16:20.596124  121941 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0112 01:16:20.596216  121941 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
I0112 01:16:20.597157  121941 factory.go:1205] Created equivalence class cache
I0112 01:16:20.597248  121941 controller_utils.go:1025] Waiting for caches to sync for scheduler controller
I0112 01:16:20.697918  121941 controller_utils.go:1032] Caches are synced for scheduler controller
E0112 01:16:20.971984  121941 controller.go:192] unable to sync kubernetes service: Post http://127.0.0.1:42417/api/v1/namespaces: dial tcp 127.0.0.1:42417: connect: connection refused
E0112 01:16:21.203268  121941 controller.go:192] unable to sync kubernetes service: Post http://127.0.0.1:44955/api/v1/namespaces: dial tcp 127.0.0.1:44955: connect: connection refused
E0112 01:16:21.339063  121941 controller.go:192] unable to sync kubernetes service: Post http://127.0.0.1:46735/api/v1/namespaces: dial tcp 127.0.0.1:46735: connect: connection refused
I0112 01:16:21.916565  121941 preemption_test.go:560] Creating the preemptor pod...
I0112 01:16:21.919522  121941 preemption_test.go:566] Creating additional pods...
I0112 01:16:22.283936  121941 preemption_test.go:582] Check unschedulable pods still exists and were never scheduled...
I0112 01:16:22.381820  121941 preemption_test.go:597] Cleaning up all pods...
E0112 01:16:22.623440  121941 controller.go:192] unable to sync kubernetes service: Post http://127.0.0.1:33681/api/v1/namespaces: dial tcp 127.0.0.1:33681: connect: connection refused
I0112 01:16:23.080359  121941 preemption_test.go:560] Creating the preemptor pod...
I0112 01:16:23.083115  121941 preemption_test.go:566] Creating additional pods...
I0112 01:16:23.459157  121941 preemption_test.go:582] Check unschedulable pods still exists and were never scheduled...
I0112 01:16:23.624850  121941 preemption_test.go:597] Cleaning up all pods...
I0112 01:16:24.400340  121941 preemption_test.go:560] Creating the preemptor pod...
I0112 01:16:24.403511  121941 preemption_test.go:566] Creating additional pods...
I0112 01:16:24.803875  121941 preemption_test.go:582] Check unschedulable pods still exists and were never scheduled...
I0112 01:16:25.034688  121941 preemption_test.go:597] Cleaning up all pods...
E0112 01:16:25.391431  121941 controller.go:192] unable to sync kubernetes service: Post http://127.0.0.1:41443/api/v1/namespaces: dial tcp 127.0.0.1:41443: connect: connection refused
I0112 01:16:25.782218  121941 preemption_test.go:560] Creating the preemptor pod...
I0112 01:16:25.785087  121941 preemption_test.go:566] Creating additional pods...
E0112 01:16:25.822008  121941 scheduler.go:251] Error preempting pod preemption-raceab4b9866-1607-11e9-851b-0242ac110002/rpod-1: pods "rpod-1" not found
I0112 01:16:26.200118  121941 preemption_test.go:582] Check unschedulable pods still exists and were never scheduled...
I0112 01:16:26.338865  121941 preemption_test.go:597] Cleaning up all pods...
E0112 01:16:26.502406  121941 scheduler.go:223] Error getting the updated preemptor pod object: pods "ppod-15" not found
W0112 01:16:26.502557  121941 factory.go:1493] A pod preemption-raceab4b9866-1607-11e9-851b-0242ac110002/ppod-15 no longer exists
W0112 01:16:26.528012  121941 factory.go:1493] A pod preemption-raceab4b9866-1607-11e9-851b-0242ac110002/ppod-22 no longer exists
W0112 01:16:26.725273  121941 factory.go:1493] A pod preemption-raceab4b9866-1607-11e9-851b-0242ac110002/ppod-45 no longer exists
I0112 01:16:27.248534  121941 preemption_test.go:560] Creating the preemptor pod...
I0112 01:16:27.252484  121941 preemption_test.go:566] Creating additional pods...
I0112 01:16:27.728883  121941 preemption_test.go:582] Check unschedulable pods still exists and were never scheduled...
I0112 01:16:27.863633  121941 preemption_test.go:597] Cleaning up all pods...
E0112 01:16:27.895098  121941 controller.go:192] unable to sync kubernetes service: Post http://127.0.0.1:34471/api/v1/namespaces: dial tcp 127.0.0.1:34471: connect: connection refused
W0112 01:16:27.958487  121941 factory.go:1493] A pod preemption-raceab4b9866-1607-11e9-851b-0242ac110002/ppod-6 no longer exists
E0112 01:16:27.958552  121941 scheduler.go:223] Error getting the updated preemptor pod object: pods "ppod-6" not found
W0112 01:16:27.961668  121941 factory.go:1493] A pod preemption-raceab4b9866-1607-11e9-851b-0242ac110002/ppod-5 no longer exists
E0112 01:16:27.963134  121941 scheduler.go:223] Error getting the updated preemptor pod object: pods "ppod-5" not found
W0112 01:16:27.966397  121941 factory.go:1493] A pod preemption-raceab4b9866-1607-11e9-851b-0242ac110002/ppod-4 no longer exists
E0112 01:16:27.972097  121941 scheduler.go:223] Error getting the updated preemptor pod object: pods "ppod-4" not found
W0112 01:16:27.976600  121941 factory.go:1493] A pod preemption-raceab4b9866-1607-11e9-851b-0242ac110002/ppod-3 no longer exists
E0112 01:16:27.978519  121941 scheduler.go:223] Error getting the updated preemptor pod object: pods "ppod-3" not found
W0112 01:16:27.980853  121941 factory.go:1493] A pod preemption-raceab4b9866-1607-11e9-851b-0242ac110002/ppod-2 no longer exists
E0112 01:16:27.984970  121941 scheduler.go:223] Error getting the updated preemptor pod object: pods "ppod-2" not found
W0112 01:16:27.993553  121941 factory.go:1493] A pod preemption-raceab4b9866-1607-11e9-851b-0242ac110002/ppod-9 no longer exists
E0112 01:16:27.993725  121941 scheduler.go:223] Error getting the updated preemptor pod object: pods "ppod-9" not found
I0112 01:16:28.634783  121941 preemption_test.go:560] Creating the preemptor pod...
I0112 01:16:28.642843  121941 preemption_test.go:566] Creating additional pods...
I0112 01:16:29.082713  121941 preemption_test.go:582] Check unschedulable pods still exists and were never scheduled...
I0112 01:16:29.258217  121941 preemption_test.go:597] Cleaning up all pods...
I0112 01:16:30.163375  121941 preemption_test.go:560] Creating the preemptor pod...
I0112 01:16:30.166350  121941 preemption_test.go:566] Creating additional pods...
preemption_test.go:570: Test [ensures that other pods are not scheduled while preemptor is being marked as nominated (issue #72124)]: Error creating pending pod: 0-length response
				from junit_cae8d27844a37937152775ec7fb068d1755ac188_20190112-011151.xml

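Because TestPreemptionRaces is timing-sensitive, a single rerun of the command above may pass; repeated runs of the compiled test binary are more likely to reproduce the failure (a sketch, assuming a built source tree with a running etcd, which the integration tests require; golang.org/x/tools/cmd/stress is a suggested helper, not part of this job):

go test -c -o scheduler.test k8s.io/kubernetes/test/integration/scheduler
stress ./scheduler.test -test.run TestPreemptionRaces$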


Passed: 333 tests

Skipped: 4 tests

Error lines from build-log.txt

... skipping 10 lines ...
I0112 00:55:51.584] process 214 exited with code 0 after 0.0m
I0112 00:55:51.585] Call:  gcloud config get-value account
I0112 00:55:51.891] process 226 exited with code 0 after 0.0m
I0112 00:55:51.892] Will upload results to gs://kubernetes-jenkins/pr-logs using pr-kubekins@kubernetes-jenkins-pull.iam.gserviceaccount.com
I0112 00:55:51.892] Call:  kubectl get -oyaml pods/ba8f10f3-1604-11e9-b59d-0a580a6c0288
W0112 00:55:52.053] The connection to the server localhost:8080 was refused - did you specify the right host or port?
E0112 00:55:52.056] Command failed
I0112 00:55:52.056] process 238 exited with code 1 after 0.0m
E0112 00:55:52.057] unable to upload podspecs: Command '['kubectl', 'get', '-oyaml', 'pods/ba8f10f3-1604-11e9-b59d-0a580a6c0288']' returned non-zero exit status 1
I0112 00:55:52.057] Root: /workspace
I0112 00:55:52.057] cd to /workspace
I0112 00:55:52.057] Checkout: /workspace/k8s.io/kubernetes release-1.11:b9e3e545197e98486524492058c0e84926a2a4fb,72601:42a7310d52857be6608ab99c30d3d8f4d421909a to /workspace/k8s.io/kubernetes
I0112 00:55:52.058] Call:  git init k8s.io/kubernetes
... skipping 496 lines ...
W0112 01:05:56.857] I0112 01:05:56.856619   72784 controller_utils.go:1025] Waiting for caches to sync for stateful set controller
W0112 01:05:56.857] I0112 01:05:56.857437   72784 controllermanager.go:479] Started "csrapproving"
W0112 01:05:56.858] I0112 01:05:56.857662   72784 certificate_controller.go:113] Starting certificate controller
W0112 01:05:56.858] I0112 01:05:56.857708   72784 controller_utils.go:1025] Waiting for caches to sync for certificate controller
W0112 01:05:56.858] I0112 01:05:56.857780   72784 job_controller.go:143] Starting job controller
W0112 01:05:56.858] I0112 01:05:56.857818   72784 controller_utils.go:1025] Waiting for caches to sync for job controller
W0112 01:05:56.858] E0112 01:05:56.858539   72784 core.go:72] Failed to start service controller: WARNING: no cloud provider provided, services of type LoadBalancer will fail
W0112 01:05:56.859] W0112 01:05:56.858929   72784 controllermanager.go:476] Skipping "service"
W0112 01:05:56.860] I0112 01:05:56.859612   72784 controllermanager.go:479] Started "replicationcontroller"
W0112 01:05:56.860] I0112 01:05:56.859813   72784 replica_set.go:182] Starting replicationcontroller controller
W0112 01:05:56.860] I0112 01:05:56.859835   72784 controller_utils.go:1025] Waiting for caches to sync for ReplicationController controller
W0112 01:05:56.860] I0112 01:05:56.860054   72784 controllermanager.go:479] Started "replicaset"
W0112 01:05:56.860] W0112 01:05:56.860093   72784 controllermanager.go:476] Skipping "csrsigning"
... skipping 41 lines ...
W0112 01:05:56.874] I0112 01:05:56.871185   72784 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for {batch cronjobs}
W0112 01:05:56.874] I0112 01:05:56.871231   72784 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for {extensions replicasets}
W0112 01:05:56.875] I0112 01:05:56.871264   72784 controllermanager.go:479] Started "resourcequota"
W0112 01:05:56.875] I0112 01:05:56.871418   72784 resource_quota_controller.go:278] Starting resource quota controller
W0112 01:05:56.875] I0112 01:05:56.871650   72784 controller_utils.go:1025] Waiting for caches to sync for resource quota controller
W0112 01:05:56.875] I0112 01:05:56.871743   72784 resource_quota_monitor.go:301] QuotaMonitor running
W0112 01:05:56.875] W0112 01:05:56.871675   72784 garbagecollector.go:649] failed to discover preferred resources: the cache has not been filled yet
W0112 01:05:56.875] I0112 01:05:56.872382   72784 controllermanager.go:479] Started "garbagecollector"
W0112 01:05:56.876] I0112 01:05:56.872502   72784 garbagecollector.go:133] Starting garbage collector controller
W0112 01:05:56.876] I0112 01:05:56.873424   72784 controller_utils.go:1025] Waiting for caches to sync for garbage collector controller
W0112 01:05:56.876] I0112 01:05:56.873441   72784 graph_builder.go:308] GraphBuilder running
W0112 01:05:56.876] I0112 01:05:56.873745   72784 controllermanager.go:479] Started "cronjob"
W0112 01:05:56.876] I0112 01:05:56.873800   72784 cronjob_controller.go:94] Starting CronJob Manager
... skipping 55 lines ...
W0112 01:05:57.093] I0112 01:05:57.093041   72784 controller_utils.go:1032] Caches are synced for disruption controller
W0112 01:05:57.093] I0112 01:05:57.093079   72784 disruption.go:296] Sending events to api server.
W0112 01:05:57.114] I0112 01:05:57.113728   72784 controller_utils.go:1032] Caches are synced for deployment controller
W0112 01:05:57.161] I0112 01:05:57.160834   72784 controller_utils.go:1032] Caches are synced for ReplicaSet controller
W0112 01:05:57.164] I0112 01:05:57.163569   72784 controller_utils.go:1032] Caches are synced for ClusterRoleAggregator controller
W0112 01:05:57.173] I0112 01:05:57.172908   72784 controller_utils.go:1032] Caches are synced for resource quota controller
W0112 01:05:57.180] E0112 01:05:57.179745   72784 clusterroleaggregation_controller.go:180] view failed with : Operation cannot be fulfilled on clusterroles.rbac.authorization.k8s.io "view": the object has been modified; please apply your changes to the latest version and try again
W0112 01:05:57.180] E0112 01:05:57.179848   72784 clusterroleaggregation_controller.go:180] edit failed with : Operation cannot be fulfilled on clusterroles.rbac.authorization.k8s.io "edit": the object has been modified; please apply your changes to the latest version and try again
I0112 01:05:57.713] +++ [0112 01:05:57] On try 3, controller-manager: ok
I0112 01:05:57.940] node/127.0.0.1 created
I0112 01:05:57.953] +++ [0112 01:05:57] Checking kubectl version
I0112 01:05:58.046] Client Version: version.Info{Major:"1", Minor:"11+", GitVersion:"v1.11.7-beta.0.44+a01f7e72b2c47a", GitCommit:"a01f7e72b2c47af60d054066920fb9af6269aab1", GitTreeState:"clean", BuildDate:"2019-01-12T01:03:10Z", GoVersion:"go1.10.7", Compiler:"gc", Platform:"linux/amd64"}
I0112 01:05:58.046] Server Version: version.Info{Major:"1", Minor:"11+", GitVersion:"v1.11.7-beta.0.44+a01f7e72b2c47a", GitCommit:"a01f7e72b2c47af60d054066920fb9af6269aab1", GitTreeState:"clean", BuildDate:"2019-01-12T01:03:40Z", GoVersion:"go1.10.7", Compiler:"gc", Platform:"linux/amd64"}
W0112 01:05:58.147] W0112 01:05:57.941711   72784 actual_state_of_world.go:491] Failed to update statusUpdateNeeded field in actual state of world: Failed to set statusUpdateNeeded to needed true, because nodeName="127.0.0.1" does not exist
W0112 01:05:58.150] I0112 01:05:58.150356   72784 controller_utils.go:1025] Waiting for caches to sync for garbage collector controller
W0112 01:05:58.166] I0112 01:05:58.165661   72784 controller_utils.go:1025] Waiting for caches to sync for resource quota controller
W0112 01:05:58.174] I0112 01:05:58.173714   72784 controller_utils.go:1032] Caches are synced for garbage collector controller
W0112 01:05:58.174] I0112 01:05:58.173746   72784 garbagecollector.go:142] Garbage collector: all resource monitors have synced. Proceeding to collect garbage
W0112 01:05:58.251] I0112 01:05:58.250679   72784 controller_utils.go:1032] Caches are synced for garbage collector controller
W0112 01:05:58.266] I0112 01:05:58.265923   72784 controller_utils.go:1032] Caches are synced for resource quota controller
... skipping 80 lines ...
I0112 01:06:02.594] +++ working dir: /go/src/k8s.io/kubernetes
I0112 01:06:02.597] +++ command: run_RESTMapper_evaluation_tests
I0112 01:06:02.608] +++ [0112 01:06:02] Creating namespace namespace-1547255162-16823
I0112 01:06:02.690] namespace/namespace-1547255162-16823 created
I0112 01:06:02.770] Context "test" modified.
I0112 01:06:02.777] +++ [0112 01:06:02] Testing RESTMapper
I0112 01:06:02.924] +++ [0112 01:06:02] "kubectl get unknownresourcetype" returns error as expected: error: the server doesn't have a resource type "unknownresourcetype"
I0112 01:06:02.936] +++ exit code: 0
I0112 01:06:03.071] NAME                              SHORTNAMES   APIGROUP                       NAMESPACED   KIND
I0112 01:06:03.072] bindings                                                                      true         Binding
I0112 01:06:03.072] componentstatuses                 cs                                          false        ComponentStatus
I0112 01:06:03.072] configmaps                        cm                                          true         ConfigMap
I0112 01:06:03.072] endpoints                         ep                                          true         Endpoints
... skipping 583 lines ...
I0112 01:06:26.773] test-cmd-util.sh:444: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0112 01:06:26.979] test-cmd-util.sh:448: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0112 01:06:27.095] test-cmd-util.sh:452: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0112 01:06:27.304] test-cmd-util.sh:456: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0112 01:06:27.415] test-cmd-util.sh:460: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0112 01:06:27.517] pod "valid-pod" force deleted
W0112 01:06:27.618] error: resource(s) were provided, but no name, label selector, or --all flag specified
W0112 01:06:27.618] error: setting 'all' parameter but found a non empty selector. 
W0112 01:06:27.618] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
I0112 01:06:27.719] test-cmd-util.sh:464: Successful get pods -l'name in (valid-pod)' {{range.items}}{{$id_field}}:{{end}}: 
I0112 01:06:27.759] test-cmd-util.sh:469: Successful get namespaces {{range.items}}{{ if eq $id_field \"test-kubectl-describe-pod\" }}found{{end}}{{end}}:: :
I0112 01:06:27.847] namespace/test-kubectl-describe-pod created
I0112 01:06:27.959] test-cmd-util.sh:473: Successful get namespaces/test-kubectl-describe-pod {{.metadata.name}}: test-kubectl-describe-pod
I0112 01:06:28.065] test-cmd-util.sh:477: Successful get secrets --namespace=test-kubectl-describe-pod {{range.items}}{{.metadata.name}}:{{end}}: 
... skipping 11 lines ...
I0112 01:06:29.209] poddisruptionbudget.policy/test-pdb-3 created
I0112 01:06:29.334] test-cmd-util.sh:506: Successful get pdb/test-pdb-3 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 2
I0112 01:06:29.427] poddisruptionbudget.policy/test-pdb-4 created
I0112 01:06:29.553] test-cmd-util.sh:510: Successful get pdb/test-pdb-4 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 50%
I0112 01:06:29.770] test-cmd-util.sh:516: Successful get pods --namespace=test-kubectl-describe-pod {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:06:29.979] pod/env-test-pod created
W0112 01:06:30.079] error: min-available and max-unavailable cannot be both specified
I0112 01:06:30.207] test-cmd-util.sh:519: Successful describe pods --namespace=test-kubectl-describe-pod env-test-pod:
I0112 01:06:30.207] Name:               env-test-pod
I0112 01:06:30.207] Namespace:          test-kubectl-describe-pod
I0112 01:06:30.207] Priority:           0
I0112 01:06:30.207] PriorityClassName:  <none>
I0112 01:06:30.207] Node:               <none>
... skipping 161 lines ...
I0112 01:06:44.897] pod/valid-pod patched
I0112 01:06:45.000] test-cmd-util.sh:721: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: changed-with-yaml:
I0112 01:06:45.086] pod/valid-pod patched
I0112 01:06:45.200] test-cmd-util.sh:726: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:3.1:
I0112 01:06:45.384] pod/valid-pod patched
I0112 01:06:45.501] test-cmd-util.sh:742: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: nginx:
I0112 01:06:45.731] +++ [0112 01:06:45] "kubectl patch with resourceVersion 489" returns error as expected: Error from server (Conflict): Operation cannot be fulfilled on pods "valid-pod": the object has been modified; please apply your changes to the latest version and try again
I0112 01:06:46.034] pod "valid-pod" deleted
I0112 01:06:46.048] pod/valid-pod replaced
I0112 01:06:46.182] test-cmd-util.sh:766: Successful get pod valid-pod {{(index .spec.containers 0).name}}: replaced-k8s-serve-hostname
I0112 01:06:46.368] Successful
I0112 01:06:46.369] message:error: --grace-period must have --force specified
I0112 01:06:46.369] has:\-\-grace-period must have \-\-force specified
I0112 01:06:46.549] Successful
I0112 01:06:46.549] message:error: --timeout must have --force specified
I0112 01:06:46.549] has:\-\-timeout must have \-\-force specified
I0112 01:06:46.717] node/node-v1-test created
W0112 01:06:46.817] W0112 01:06:46.716494   72784 actual_state_of_world.go:491] Failed to update statusUpdateNeeded field in actual state of world: Failed to set statusUpdateNeeded to needed true, because nodeName="node-v1-test" does not exist
I0112 01:06:46.918] node/node-v1-test replaced
I0112 01:06:47.014] test-cmd-util.sh:803: Successful get node node-v1-test {{.metadata.annotations.a}}: b
I0112 01:06:47.120] node "node-v1-test" deleted
I0112 01:06:47.248] test-cmd-util.sh:810: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: nginx:
I0112 01:06:47.579] test-cmd-util.sh:813: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: k8s.gcr.io/serve_hostname:
I0112 01:06:48.682] test-cmd-util.sh:826: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
... skipping 17 lines ...
I0112 01:06:48.965]     name: kubernetes-pause
I0112 01:06:48.965] has:localonlyvalue
I0112 01:06:48.989] test-cmd-util.sh:836: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
I0112 01:06:49.237] test-cmd-util.sh:840: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
I0112 01:06:49.356] test-cmd-util.sh:844: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
I0112 01:06:49.460] pod/valid-pod labeled
W0112 01:06:49.561] error: 'name' already has a value (valid-pod), and --overwrite is false
I0112 01:06:49.661] test-cmd-util.sh:848: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod-super-sayan
I0112 01:06:49.682] test-cmd-util.sh:852: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0112 01:06:49.776] pod "valid-pod" force deleted
I0112 01:06:49.896] test-cmd-util.sh:856: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:06:49.902] +++ [0112 01:06:49] Creating namespace namespace-1547255209-1718
I0112 01:06:49.995] namespace/namespace-1547255209-1718 created
... skipping 81 lines ...
I0112 01:06:58.218] +++ Running case: test-cmd.run_kubectl_create_error_tests 
I0112 01:06:58.220] +++ working dir: /go/src/k8s.io/kubernetes
I0112 01:06:58.223] +++ command: run_kubectl_create_error_tests
I0112 01:06:58.234] +++ [0112 01:06:58] Creating namespace namespace-1547255218-29665
I0112 01:06:58.326] namespace/namespace-1547255218-29665 created
I0112 01:06:58.430] Context "test" modified.
I0112 01:06:58.437] +++ [0112 01:06:58] Testing kubectl create with error
W0112 01:06:58.538] Error: required flag(s) "filename" not set
W0112 01:06:58.538] 
W0112 01:06:58.538] 
W0112 01:06:58.538] Examples:
W0112 01:06:58.538]   # Create a pod using the data in pod.json.
W0112 01:06:58.538]   kubectl create -f ./pod.json
W0112 01:06:58.538]   
... skipping 38 lines ...
W0112 01:06:58.545]   kubectl create -f FILENAME [options]
W0112 01:06:58.545] 
W0112 01:06:58.545] Use "kubectl <command> --help" for more information about a given command.
W0112 01:06:58.545] Use "kubectl options" for a list of global command-line options (applies to all commands).
W0112 01:06:58.545] 
W0112 01:06:58.546] required flag(s) "filename" not set
I0112 01:06:58.751] +++ [0112 01:06:58] "kubectl create with empty string list returns error as expected: error: error validating "hack/testdata/invalid-rc-with-empty-args.yaml": error validating data: ValidationError(ReplicationController.spec.template.spec.containers[0].args): unknown object type "nil" in ReplicationController.spec.template.spec.containers[0].args[0]; if you choose to ignore these errors, turn validation off with --validate=false
I0112 01:06:58.994] +++ exit code: 0
I0112 01:06:59.024] Recording: run_kubectl_apply_tests
I0112 01:06:59.025] Running command: run_kubectl_apply_tests
I0112 01:06:59.045] 
I0112 01:06:59.047] +++ Running case: test-cmd.run_kubectl_apply_tests 
I0112 01:06:59.049] +++ working dir: /go/src/k8s.io/kubernetes
... skipping 20 lines ...
W0112 01:07:01.650] I0112 01:07:01.032974   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255219-24656", Name:"test-deployment-retainkeys-5f667997fd", UID:"5dc41604-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"503", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test-deployment-retainkeys-5f667997fd-9vx79
I0112 01:07:01.750] test-cmd-util.sh:995: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:07:01.835] pod/selector-test-pod created
I0112 01:07:01.950] test-cmd-util.sh:999: Successful get pods selector-test-pod {{.metadata.labels.name}}: selector-test-pod
I0112 01:07:02.053] Successful
I0112 01:07:02.053] message:No resources found.
I0112 01:07:02.053] Error from server (NotFound): pods "selector-test-pod-dont-apply" not found
I0112 01:07:02.054] has:pods "selector-test-pod-dont-apply" not found
I0112 01:07:02.149] pod "selector-test-pod" deleted
I0112 01:07:02.267] test-cmd-util.sh:1009: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:07:02.453] pod/a created
I0112 01:07:03.996] test-cmd-util.sh:1014: Successful get pods a {{.metadata.name}}: a
I0112 01:07:04.102] Successful
I0112 01:07:04.102] message:No resources found.
I0112 01:07:04.102] Error from server (NotFound): pods "b" not found
I0112 01:07:04.102] has:pods "b" not found
I0112 01:07:04.252] pod/b created
I0112 01:07:04.264] pod/a pruned
I0112 01:07:05.991] test-cmd-util.sh:1022: Successful get pods b {{.metadata.name}}: b
I0112 01:07:06.127] Successful
I0112 01:07:06.128] message:No resources found.
I0112 01:07:06.128] Error from server (NotFound): pods "a" not found
I0112 01:07:06.128] has:pods "a" not found
I0112 01:07:06.247] pod "b" deleted
I0112 01:07:06.372] test-cmd-util.sh:1032: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:07:06.545] pod/a created
I0112 01:07:06.667] test-cmd-util.sh:1037: Successful get pods a {{.metadata.name}}: a
I0112 01:07:06.777] Successful
I0112 01:07:06.778] message:No resources found.
I0112 01:07:06.778] Error from server (NotFound): pods "b" not found
I0112 01:07:06.778] has:pods "b" not found
I0112 01:07:06.944] pod/b created
I0112 01:07:07.073] test-cmd-util.sh:1045: Successful get pods a {{.metadata.name}}: a
I0112 01:07:07.198] test-cmd-util.sh:1046: Successful get pods b {{.metadata.name}}: b
I0112 01:07:07.315] pod "a" deleted
I0112 01:07:07.326] pod "b" deleted
I0112 01:07:07.519] Successful
I0112 01:07:07.519] message:error: all resources selected for prune without explicitly passing --all. To prune all resources, pass the --all flag. If you did not mean to prune all resources, specify a label selector.
I0112 01:07:07.519] has:all resources selected for prune without explicitly passing --all
I0112 01:07:07.700] pod/a created
I0112 01:07:07.706] pod/b created
I0112 01:07:07.715] service/prune-svc created
I0112 01:07:09.261] test-cmd-util.sh:1058: Successful get pods a {{.metadata.name}}: a
I0112 01:07:09.367] test-cmd-util.sh:1059: Successful get pods b {{.metadata.name}}: b
... skipping 126 lines ...
I0112 01:07:20.819] +++ [0112 01:07:20] Testing kubectl create filter
I0112 01:07:20.934] test-cmd-util.sh:1101: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:07:21.105] pod/selector-test-pod created
I0112 01:07:21.217] test-cmd-util.sh:1105: Successful get pods selector-test-pod {{.metadata.labels.name}}: selector-test-pod
I0112 01:07:21.325] Successful
I0112 01:07:21.325] message:No resources found.
I0112 01:07:21.326] Error from server (NotFound): pods "selector-test-pod-dont-apply" not found
I0112 01:07:21.326] has:pods "selector-test-pod-dont-apply" not found
I0112 01:07:21.418] pod "selector-test-pod" deleted
I0112 01:07:21.437] +++ exit code: 0
I0112 01:07:21.477] Recording: run_kubectl_apply_deployments_tests
I0112 01:07:21.478] Running command: run_kubectl_apply_deployments_tests
I0112 01:07:21.497] 
... skipping 26 lines ...
I0112 01:07:23.384] test-cmd-util.sh:1144: Successful get deployments my-depl {{.metadata.labels.l2}}: l2
I0112 01:07:23.480] deployment.extensions "my-depl" deleted
I0112 01:07:23.488] replicaset.extensions "my-depl-574c668485" deleted
I0112 01:07:23.492] replicaset.extensions "my-depl-844db54fcf" deleted
I0112 01:07:23.501] pod "my-depl-574c668485-msrxn" deleted
I0112 01:07:23.506] pod "my-depl-844db54fcf-gxrp7" deleted
W0112 01:07:23.607] E0112 01:07:23.504352   72784 replica_set.go:450] Sync "namespace-1547255241-28991/my-depl-844db54fcf" failed with Operation cannot be fulfilled on replicasets.apps "my-depl-844db54fcf": StorageError: invalid object, Code: 4, Key: /registry/replicasets/namespace-1547255241-28991/my-depl-844db54fcf, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 6a5b4af5-1606-11e9-8033-0242ac110002, UID in object meta: 
W0112 01:07:23.607] E0112 01:07:23.505626   72784 replica_set.go:450] Sync "namespace-1547255241-28991/my-depl-574c668485" failed with replicasets.apps "my-depl-574c668485" not found
W0112 01:07:23.607] E0112 01:07:23.507161   72784 replica_set.go:450] Sync "namespace-1547255241-28991/my-depl-844db54fcf" failed with replicasets.apps "my-depl-844db54fcf" not found
I0112 01:07:23.708] test-cmd-util.sh:1150: Successful get deployments {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:07:23.723] (Btest-cmd-util.sh:1151: Successful get replicasets {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:07:23.835] (Btest-cmd-util.sh:1152: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:07:23.938] test-cmd-util.sh:1156: Successful get deployments {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:07:24.113] deployment.extensions/nginx created
W0112 01:07:24.214] I0112 01:07:24.116501   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255241-28991", Name:"nginx", UID:"6b86a3a3-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"672", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-74d9fbb98 to 3
W0112 01:07:24.214] I0112 01:07:24.120236   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255241-28991", Name:"nginx-74d9fbb98", UID:"6b87256d-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"673", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-74d9fbb98-d77r9
W0112 01:07:24.215] I0112 01:07:24.122653   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255241-28991", Name:"nginx-74d9fbb98", UID:"6b87256d-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"673", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-74d9fbb98-sr8q9
W0112 01:07:24.215] I0112 01:07:24.124492   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255241-28991", Name:"nginx-74d9fbb98", UID:"6b87256d-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"673", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-74d9fbb98-n7cfm
I0112 01:07:24.316] test-cmd-util.sh:1160: Successful get deployment nginx {{.metadata.name}}: nginx
I0112 01:07:28.444] Successful
I0112 01:07:28.445] message:Error from server (Conflict): error when applying patch:
I0112 01:07:28.445] {"metadata":{"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"extensions/v1beta1\",\"kind\":\"Deployment\",\"metadata\":{\"annotations\":{},\"labels\":{\"name\":\"nginx\"},\"name\":\"nginx\",\"namespace\":\"namespace-1547255241-28991\",\"resourceVersion\":\"99\"},\"spec\":{\"replicas\":3,\"selector\":{\"matchLabels\":{\"name\":\"nginx2\"}},\"template\":{\"metadata\":{\"labels\":{\"name\":\"nginx2\"}},\"spec\":{\"containers\":[{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"nginx\",\"ports\":[{\"containerPort\":80}]}]}}}}\n"},"resourceVersion":"99"},"spec":{"selector":{"matchLabels":{"name":"nginx2"}},"template":{"metadata":{"labels":{"name":"nginx2"}}}}}
I0112 01:07:28.445] to:
I0112 01:07:28.446] Resource: "extensions/v1beta1, Resource=deployments", GroupVersionKind: "extensions/v1beta1, Kind=Deployment"
I0112 01:07:28.446] Name: "nginx", Namespace: "namespace-1547255241-28991"
I0112 01:07:28.447] Object: &{map["kind":"Deployment" "apiVersion":"extensions/v1beta1" "metadata":map["uid":"6b86a3a3-1606-11e9-8033-0242ac110002" "creationTimestamp":"2019-01-12T01:07:24Z" "labels":map["name":"nginx"] "namespace":"namespace-1547255241-28991" "selfLink":"/apis/extensions/v1beta1/namespaces/namespace-1547255241-28991/deployments/nginx" "generation":'\x01' "annotations":map["deployment.kubernetes.io/revision":"1" "kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"extensions/v1beta1\",\"kind\":\"Deployment\",\"metadata\":{\"annotations\":{},\"labels\":{\"name\":\"nginx\"},\"name\":\"nginx\",\"namespace\":\"namespace-1547255241-28991\"},\"spec\":{\"replicas\":3,\"template\":{\"metadata\":{\"labels\":{\"name\":\"nginx1\"}},\"spec\":{\"containers\":[{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"nginx\",\"ports\":[{\"containerPort\":80}]}]}}}}\n"] "name":"nginx" "resourceVersion":"685"] "spec":map["replicas":'\x03' "selector":map["matchLabels":map["name":"nginx1"]] "template":map["metadata":map["creationTimestamp":<nil> "labels":map["name":"nginx1"]] "spec":map["restartPolicy":"Always" "terminationGracePeriodSeconds":'\x1e' "dnsPolicy":"ClusterFirst" "securityContext":map[] "schedulerName":"default-scheduler" "containers":[map["terminationMessagePath":"/dev/termination-log" "terminationMessagePolicy":"File" "imagePullPolicy":"IfNotPresent" "name":"nginx" "image":"k8s.gcr.io/nginx:test-cmd" "ports":[map["containerPort":'P' "protocol":"TCP"]] "resources":map[]]]]] "strategy":map["rollingUpdate":map["maxUnavailable":'\x01' "maxSurge":'\x01'] "type":"RollingUpdate"] "revisionHistoryLimit":'\n' "progressDeadlineSeconds":'\u0258'] "status":map["updatedReplicas":'\x03' "unavailableReplicas":'\x03' "conditions":[map["type":"Available" "status":"False" "lastUpdateTime":"2019-01-12T01:07:24Z" "lastTransitionTime":"2019-01-12T01:07:24Z" "reason":"MinimumReplicasUnavailable" "message":"Deployment does not have minimum availability."] map["message":"ReplicaSet \"nginx-74d9fbb98\" is progressing." "type":"Progressing" "status":"True" "lastUpdateTime":"2019-01-12T01:07:24Z" "lastTransitionTime":"2019-01-12T01:07:24Z" "reason":"ReplicaSetUpdated"]] "observedGeneration":'\x01' "replicas":'\x03']]}
I0112 01:07:28.447] for: "hack/testdata/deployment-label-change2.yaml": Operation cannot be fulfilled on deployments.extensions "nginx": the object has been modified; please apply your changes to the latest version and try again
I0112 01:07:28.448] has:Error from server (Conflict)
W0112 01:07:28.548] I0112 01:07:27.506937   72784 horizontal.go:366] Horizontal Pod Autoscaler has been deleted namespace-1547255215-30322/frontend
W0112 01:07:32.677] E0112 01:07:32.676918   72784 replica_set.go:450] Sync "namespace-1547255241-28991/nginx-74d9fbb98" failed with Operation cannot be fulfilled on replicasets.apps "nginx-74d9fbb98": StorageError: invalid object, Code: 4, Key: /registry/replicasets/namespace-1547255241-28991/nginx-74d9fbb98, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 6b87256d-1606-11e9-8033-0242ac110002, UID in object meta: 
I0112 01:07:33.663] deployment.extensions/nginx configured
W0112 01:07:33.764] I0112 01:07:33.665995   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255241-28991", Name:"nginx", UID:"71379669-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"707", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-d7576cc9 to 3
W0112 01:07:33.764] I0112 01:07:33.668817   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255241-28991", Name:"nginx-d7576cc9", UID:"71382f58-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"708", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-d7576cc9-g74zv
W0112 01:07:33.765] I0112 01:07:33.672109   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255241-28991", Name:"nginx-d7576cc9", UID:"71382f58-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"708", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-d7576cc9-4wm8v
W0112 01:07:33.765] I0112 01:07:33.672467   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255241-28991", Name:"nginx-d7576cc9", UID:"71382f58-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"708", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-d7576cc9-hfgnd
I0112 01:07:33.865] Successful
... skipping 147 lines ...
I0112 01:07:41.351] namespace/namespace-1547255261-15277 created
I0112 01:07:41.440] Context "test" modified.
I0112 01:07:41.448] +++ [0112 01:07:41] Testing kubectl get
I0112 01:07:41.563] test-cmd-util.sh:1502: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:07:41.665] Successful
I0112 01:07:41.666] message:No resources found.
I0112 01:07:41.666] Error from server (NotFound): pods "abc" not found
I0112 01:07:41.666] has:pods "abc" not found
I0112 01:07:41.772] test-cmd-util.sh:1510: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:07:41.878] Successful
I0112 01:07:41.878] message:Error from server (NotFound): pods "abc" not found
I0112 01:07:41.878] has:pods "abc" not found
I0112 01:07:41.989] test-cmd-util.sh:1518: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:07:42.091] Successful
I0112 01:07:42.092] message:{
I0112 01:07:42.092]     "apiVersion": "v1",
I0112 01:07:42.092]     "items": [],
... skipping 33 lines ...
I0112 01:07:42.950] has not:No resources found
I0112 01:07:43.052] Successful
I0112 01:07:43.052] message:No resources found.
I0112 01:07:43.053] has:No resources found
I0112 01:07:43.221] test-cmd-util.sh:1562: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:07:43.289] Successful
I0112 01:07:43.289] message:Error from server (NotFound): pods "abc" not found
I0112 01:07:43.290] has:pods "abc" not found
I0112 01:07:43.291] FAIL!
I0112 01:07:43.291] message:Error from server (NotFound): pods "abc" not found
I0112 01:07:43.292] has not:List
I0112 01:07:43.292] 1568 /go/src/k8s.io/kubernetes/hack/make-rules/test-cmd-util.sh
I0112 01:07:43.428] Successful
I0112 01:07:43.429] message:I0112 01:07:43.371699   84630 loader.go:359] Config loaded from file /tmp/tmp.en6w4d8pLX/.kube/config
I0112 01:07:43.429] I0112 01:07:43.372208   84630 loader.go:359] Config loaded from file /tmp/tmp.en6w4d8pLX/.kube/config
I0112 01:07:43.429] I0112 01:07:43.373716   84630 round_trippers.go:405] GET http://127.0.0.1:8080/version?timeout=32s 200 OK in 1 milliseconds
... skipping 991 lines ...
I0112 01:07:47.100]     }
I0112 01:07:47.100] }
I0112 01:07:47.204] test-cmd-util.sh:1621: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0112 01:07:47.511] <no value>Successful
I0112 01:07:47.511] message:valid-pod:
I0112 01:07:47.511] has:valid-pod:
W0112 01:07:47.625] error: error executing jsonpath "{.missing}": missing is not found
I0112 01:07:47.725] Successful
I0112 01:07:47.726] message:Error executing template: missing is not found. Printing more information for debugging the template:
I0112 01:07:47.726] 	template was:
I0112 01:07:47.726] 		{.missing}
I0112 01:07:47.726] 	object given to jsonpath engine was:
I0112 01:07:47.727] 		map[string]interface {}{"apiVersion":"v1", "metadata":map[string]interface {}{"namespace":"namespace-1547255266-32711", "selfLink":"/api/v1/namespaces/namespace-1547255266-32711/pods/valid-pod", "uid":"792643d0-1606-11e9-8033-0242ac110002", "resourceVersion":"779", "creationTimestamp":"2019-01-12T01:07:46Z", "labels":map[string]interface {}{"name":"valid-pod"}, "name":"valid-pod"}, "spec":map[string]interface {}{"schedulerName":"default-scheduler", "priority":0, "containers":[]interface {}{map[string]interface {}{"name":"kubernetes-serve-hostname", "image":"k8s.gcr.io/serve_hostname", "resources":map[string]interface {}{"limits":map[string]interface {}{"cpu":"1", "memory":"512Mi"}, "requests":map[string]interface {}{"cpu":"1", "memory":"512Mi"}}, "terminationMessagePath":"/dev/termination-log", "terminationMessagePolicy":"File", "imagePullPolicy":"Always"}}, "restartPolicy":"Always", "terminationGracePeriodSeconds":30, "dnsPolicy":"ClusterFirst", "securityContext":map[string]interface {}{}}, "status":map[string]interface {}{"phase":"Pending", "qosClass":"Guaranteed"}, "kind":"Pod"}
I0112 01:07:47.727] has:missing is not found
I0112 01:07:47.755] Successful
I0112 01:07:47.756] message:Error executing template: template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing". Printing more information for debugging the template:
I0112 01:07:47.756] 	template was:
I0112 01:07:47.756] 		{{.missing}}
I0112 01:07:47.756] 	raw data was:
I0112 01:07:47.757] 		{"apiVersion":"v1","kind":"Pod","metadata":{"creationTimestamp":"2019-01-12T01:07:46Z","labels":{"name":"valid-pod"},"name":"valid-pod","namespace":"namespace-1547255266-32711","resourceVersion":"779","selfLink":"/api/v1/namespaces/namespace-1547255266-32711/pods/valid-pod","uid":"792643d0-1606-11e9-8033-0242ac110002"},"spec":{"containers":[{"image":"k8s.gcr.io/serve_hostname","imagePullPolicy":"Always","name":"kubernetes-serve-hostname","resources":{"limits":{"cpu":"1","memory":"512Mi"},"requests":{"cpu":"1","memory":"512Mi"}},"terminationMessagePath":"/dev/termination-log","terminationMessagePolicy":"File"}],"dnsPolicy":"ClusterFirst","priority":0,"restartPolicy":"Always","schedulerName":"default-scheduler","securityContext":{},"terminationGracePeriodSeconds":30},"status":{"phase":"Pending","qosClass":"Guaranteed"}}
I0112 01:07:47.757] 	object given to template engine was:
I0112 01:07:47.757] 		map[apiVersion:v1 kind:Pod metadata:map[name:valid-pod namespace:namespace-1547255266-32711 resourceVersion:779 selfLink:/api/v1/namespaces/namespace-1547255266-32711/pods/valid-pod uid:792643d0-1606-11e9-8033-0242ac110002 creationTimestamp:2019-01-12T01:07:46Z labels:map[name:valid-pod]] spec:map[terminationGracePeriodSeconds:30 containers:[map[imagePullPolicy:Always name:kubernetes-serve-hostname resources:map[requests:map[cpu:1 memory:512Mi] limits:map[memory:512Mi cpu:1]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:File image:k8s.gcr.io/serve_hostname]] dnsPolicy:ClusterFirst priority:0 restartPolicy:Always schedulerName:default-scheduler securityContext:map[]] status:map[phase:Pending qosClass:Guaranteed]]
I0112 01:07:47.758] has:map has no entry for key "missing"
W0112 01:07:47.858] error: error executing template "{{.missing}}": template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing"
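The two blocks above show how each output template surfaces a missing key: jsonpath reports that the field "is not found", while go-template reports that the map "has no entry for key". Assuming any existing pod named valid-pod, both errors can be reproduced directly:

    # jsonpath: error executing jsonpath "{.missing}": missing is not found
    kubectl get pod valid-pod -o jsonpath='{.missing}'
    # go-template: map has no entry for key "missing"
    kubectl get pod valid-pod -o go-template='{{.missing}}'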
W0112 01:07:48.877] E0112 01:07:48.876591   84970 streamwatcher.go:109] Unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)
I0112 01:07:48.977] Successful
I0112 01:07:48.978] message:NAME        READY     STATUS    RESTARTS   AGE
I0112 01:07:48.978] valid-pod   0/1       Pending   0          1s
I0112 01:07:48.978] has:STATUS
I0112 01:07:48.978] Successful
... skipping 78 lines ...
I0112 01:07:51.215]   terminationGracePeriodSeconds: 30
I0112 01:07:51.215] status:
I0112 01:07:51.215]   phase: Pending
I0112 01:07:51.215]   qosClass: Guaranteed
I0112 01:07:51.215] has:name: valid-pod
I0112 01:07:51.229] Successful
I0112 01:07:51.229] message:Error from server (NotFound): pods "invalid-pod" not found
I0112 01:07:51.230] has:"invalid-pod" not found
I0112 01:07:51.336] pod "valid-pod" deleted
I0112 01:07:51.462] test-cmd-util.sh:1659: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:07:51.677] pod/redis-master created
I0112 01:07:51.681] pod/valid-pod created
I0112 01:07:51.807] Successful
... skipping 237 lines ...
I0112 01:07:53.245] namespace-1547255261-15277   12s
I0112 01:07:53.245] namespace-1547255266-32711   7s
I0112 01:07:53.246] namespace-1547255272-29085   1s
I0112 01:07:53.246] has:application/json
W0112 01:07:53.402] I0112 01:07:53.401878   68585 controller.go:597] quota admission added evaluator for: {extensions daemonsets}
W0112 01:07:53.419] I0112 01:07:53.418850   68585 controller.go:597] quota admission added evaluator for: {apps controllerrevisions}
W0112 01:07:53.424] I0112 01:07:53.423460   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255272-29085", Name:"bind", UID:"7cfc038a-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"796", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:07:53.424] I0112 01:07:53.423589   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255272-29085", Name:"bind", UID:"7cfc038a-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"796", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:07:53.425] I0112 01:07:53.423740   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255272-29085", Name:"bind", UID:"7cfc038a-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"796", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:07:53.435] I0112 01:07:53.434309   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255272-29085", Name:"bind", UID:"7cfc038a-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"798", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:07:53.435] I0112 01:07:53.434452   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255272-29085", Name:"bind", UID:"7cfc038a-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"798", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:07:53.435] I0112 01:07:53.434526   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255272-29085", Name:"bind", UID:"7cfc038a-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"798", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
I0112 01:07:53.536] daemonset.extensions/bind created
I0112 01:07:53.538] test-cmd-util.sh:1404: Successful get ds {{range.items}}{{.metadata.name}}:{{end}}: bind:
I0112 01:07:53.765] Successful
I0112 01:07:53.765] message:NAME DESIRED CURRENT READY UP-TO-DATE AVAILABLE NODE SELECTOR
I0112 01:07:53.765] bind 1 0 0 0 0 <none>
I0112 01:07:53.766] has:NAME DESIRED CURRENT READY UP-TO-DATE AVAILABLE NODE SELECTOR
... skipping 57 lines ...
I0112 01:08:02.547] 
I0112 01:08:02.550] +++ Running case: test-cmd.run_create_secret_tests 
I0112 01:08:02.554] +++ working dir: /go/src/k8s.io/kubernetes
I0112 01:08:02.557] +++ command: run_create_secret_tests
I0112 01:08:02.662] Successful
I0112 01:08:02.663] message:No resources found.
I0112 01:08:02.663] Error from server (NotFound): secrets "mysecret" not found
I0112 01:08:02.663] has:secrets "mysecret" not found
I0112 01:08:02.873] Successful
I0112 01:08:02.874] message:No resources found.
I0112 01:08:02.874] Error from server (NotFound): secrets "mysecret" not found
I0112 01:08:02.874] has:secrets "mysecret" not found
I0112 01:08:02.875] Successful
I0112 01:08:02.876] message:user-specified
I0112 01:08:02.876] has:user-specified
I0112 01:08:02.971] Successful
I0112 01:08:03.071] {"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"tester-create-cm","namespace":"default","selfLink":"/api/v1/namespaces/default/configmaps/tester-create-cm","uid":"82beed81-1606-11e9-8033-0242ac110002","resourceVersion":"868","creationTimestamp":"2019-01-12T01:08:03Z"}}
... skipping 246 lines ...
I0112 01:08:11.181] foo.company.com/test patched
I0112 01:08:11.298] test-cmd-util.sh:2143: Successful get foos/test {{.patched}}: value1
I0112 01:08:11.395] foo.company.com/test patched
I0112 01:08:11.521] test-cmd-util.sh:2145: Successful get foos/test {{.patched}}: value2
I0112 01:08:11.622] foo.company.com/test patched
I0112 01:08:11.727] test-cmd-util.sh:2147: Successful get foos/test {{.patched}}: <no value>
I0112 01:08:11.933] +++ [0112 01:08:11] "kubectl patch --local" returns error as expected for CustomResource: error: cannot apply strategic merge patch for company.com/v1, Kind=Foo locally, try --type merge
I0112 01:08:12.014] {
I0112 01:08:12.014]     "apiVersion": "company.com/v1",
I0112 01:08:12.014]     "kind": "Foo",
I0112 01:08:12.014]     "metadata": {
I0112 01:08:12.014]         "annotations": {
I0112 01:08:12.015]             "kubernetes.io/change-cause": "kubectl patch foos/test --server=http://127.0.0.1:8080 --match-server-version=true --patch={\"patched\":null} --type=merge --record=true"
... skipping 111 lines ...
I0112 01:08:13.601] has:bar.company.com/test
I0112 01:08:13.693] bar.company.com "test" deleted
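The patch cases above also confirm that kubectl refuses to strategic-merge-patch a CustomResource locally, since CRD types carry no strategic-merge metadata, and that --type merge is the workaround the error message suggests. A sketch, with foo.yaml standing in for any manifest of the Foo custom type:

    # Default (strategic) patch type: fails for company.com/v1, Kind=Foo.
    kubectl patch --local -f foo.yaml -p '{"patched":"value2"}' -o json
    # JSON merge patch: succeeds and prints the patched object.
    kubectl patch --local -f foo.yaml -p '{"patched":"value2"}' --type merge -o json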
W0112 01:08:13.794] /go/src/k8s.io/kubernetes/hack/lib/test.sh: line 264: 87439 Killed                  while [ ${tries} -lt 10 ]; do
W0112 01:08:13.794]     tries=$((tries+1)); kubectl "${kube_flags[@]}" patch bars/test -p "{\"patched\":\"${tries}\"}" --type=merge; sleep 1;
W0112 01:08:13.794] done
W0112 01:08:13.795] /go/src/k8s.io/kubernetes/hack/make-rules/test-cmd-util.sh: line 2201: 87438 Killed                  kubectl "${kube_flags[@]}" get bars --request-timeout=1m --watch-only -o name
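Those two "Killed" traces are expected: the test backgrounds a --watch-only reader and patches the watched resource in a loop, then tears both down. Reassembled from the fragments above (the bars CRD must already exist, and kube_flags is the harness's flag array):

    kubectl "${kube_flags[@]}" get bars --request-timeout=1m --watch-only -o name &
    watch_pid=$!
    tries=0
    while [ ${tries} -lt 10 ]; do
      tries=$((tries+1))
      kubectl "${kube_flags[@]}" patch bars/test -p "{\"patched\":\"${tries}\"}" --type=merge
      sleep 1
    done
    kill "${watch_pid}"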
W0112 01:08:28.304] E0112 01:08:28.303334   72784 resource_quota_controller.go:460] failed to sync resource monitors: [couldn't start monitor for resource {"company.com" "v1" "bars"}: unable to monitor quota for resource "company.com/v1, Resource=bars", couldn't start monitor for resource {"mygroup.example.com" "v1alpha1" "resources"}: unable to monitor quota for resource "mygroup.example.com/v1alpha1, Resource=resources", couldn't start monitor for resource {"company.com" "v1" "foos"}: unable to monitor quota for resource "company.com/v1, Resource=foos", couldn't start monitor for resource {"company.com" "v1" "validfoos"}: unable to monitor quota for resource "company.com/v1, Resource=validfoos"]
W0112 01:08:28.459] I0112 01:08:28.458973   72784 controller_utils.go:1025] Waiting for caches to sync for garbage collector controller
W0112 01:08:28.560] I0112 01:08:28.559337   72784 controller_utils.go:1032] Caches are synced for garbage collector controller
I0112 01:08:28.690] test-cmd-util.sh:2227: Successful get bars {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:08:28.877] foo.company.com/test created
I0112 01:08:29.005] test-cmd-util.sh:2233: Successful get foos {{range.items}}{{.metadata.name}}:{{end}}: test:
I0112 01:08:29.130] test-cmd-util.sh:2236: Successful get foos/test {{.someField}}: field1
... skipping 58 lines ...
I0112 01:08:35.890] bar.company.com/test created
I0112 01:08:36.022] test-cmd-util.sh:2362: Successful get bars {{len .items}}: 1
I0112 01:08:36.121] namespace "non-native-resources" deleted
I0112 01:08:41.392] test-cmd-util.sh:2365: Successful get bars {{len .items}}: 0
I0112 01:08:41.593] customresourcedefinition.apiextensions.k8s.io "foos.company.com" deleted
W0112 01:08:41.694] No resources found.
W0112 01:08:41.694] Error from server (NotFound): namespaces "non-native-resources" not found
I0112 01:08:41.795] customresourcedefinition.apiextensions.k8s.io "bars.company.com" deleted
I0112 01:08:41.832] customresourcedefinition.apiextensions.k8s.io "resources.mygroup.example.com" deleted
I0112 01:08:41.949] customresourcedefinition.apiextensions.k8s.io "validfoos.company.com" deleted
I0112 01:08:42.008] +++ exit code: 0
I0112 01:08:42.110] Recording: run_cmd_with_img_tests
I0112 01:08:42.110] Running command: run_cmd_with_img_tests
... skipping 9 lines ...
W0112 01:08:42.452] I0112 01:08:42.451389   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255322-17984", Name:"test1-7f54676899", UID:"9a374199-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"985", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test1-7f54676899-zptm8
I0112 01:08:42.552] Successful
I0112 01:08:42.553] message:deployment.apps/test1 created
I0112 01:08:42.553] has:deployment.apps/test1 created
I0112 01:08:42.556] deployment.extensions "test1" deleted
I0112 01:08:42.653] Successful
I0112 01:08:42.653] message:error: Invalid image name "InvalidImageName": invalid reference format
I0112 01:08:42.653] has:error: Invalid image name "InvalidImageName": invalid reference format
I0112 01:08:42.670] +++ exit code: 0
I0112 01:08:42.717] Recording: run_recursive_resources_tests
I0112 01:08:42.718] Running command: run_recursive_resources_tests
I0112 01:08:42.740] 
I0112 01:08:42.742] +++ Running case: test-cmd.run_recursive_resources_tests 
I0112 01:08:42.745] +++ working dir: /go/src/k8s.io/kubernetes
... skipping 4 lines ...
I0112 01:08:42.944] Context "test" modified.
I0112 01:08:43.060] test-cmd-util.sh:2385: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:08:43.367] test-cmd-util.sh:2389: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:43.370] Successful
I0112 01:08:43.370] message:pod/busybox0 created
I0112 01:08:43.370] pod/busybox1 created
I0112 01:08:43.370] error: error validating "hack/testdata/recursive/pod/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I0112 01:08:43.370] has:error validating data: kind not set
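Throughout these recursive tests, one fixture (busybox-broken.yaml) deliberately misspells "kind" as "ind", so every recursive command is expected to succeed on the two valid pods and report exactly one error for the broken file. Under that assumption, the create case above amounts to:

    # -R/--recursive walks the whole directory tree.
    kubectl create -f hack/testdata/recursive/pod --recursive
    # pod/busybox0 created
    # pod/busybox1 created
    # error: error validating ".../busybox-broken.yaml": ... kind not set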
I0112 01:08:43.484] test-cmd-util.sh:2394: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:43.698] test-cmd-util.sh:2402: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: busybox:busybox:
I0112 01:08:43.700] Successful
I0112 01:08:43.700] message:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0112 01:08:43.700] has:Object 'Kind' is missing
I0112 01:08:43.809] test-cmd-util.sh:2409: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:44.106] test-cmd-util.sh:2413: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
I0112 01:08:44.108] Successful
I0112 01:08:44.108] message:pod/busybox0 replaced
I0112 01:08:44.108] pod/busybox1 replaced
I0112 01:08:44.108] error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I0112 01:08:44.108] has:error validating data: kind not set
I0112 01:08:44.221] test-cmd-util.sh:2418: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:44.341] (BSuccessful
I0112 01:08:44.342] message:Name:               busybox0
I0112 01:08:44.342] Namespace:          namespace-1547255322-7294
I0112 01:08:44.342] Priority:           0
I0112 01:08:44.342] PriorityClassName:  <none>
... skipping 159 lines ...
I0112 01:08:44.358] has:Object 'Kind' is missing
I0112 01:08:44.454] test-cmd-util.sh:2428: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:44.675] test-cmd-util.sh:2432: Successful get pods {{range.items}}{{.metadata.annotations.annotatekey}}:{{end}}: annotatevalue:annotatevalue:
I0112 01:08:44.677] Successful
I0112 01:08:44.678] message:pod/busybox0 annotated
I0112 01:08:44.678] pod/busybox1 annotated
I0112 01:08:44.678] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0112 01:08:44.678] has:Object 'Kind' is missing
I0112 01:08:44.790] test-cmd-util.sh:2437: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:45.111] test-cmd-util.sh:2441: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
I0112 01:08:45.113] Successful
I0112 01:08:45.114] message:Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
I0112 01:08:45.114] pod/busybox0 configured
I0112 01:08:45.114] Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
I0112 01:08:45.114] pod/busybox1 configured
I0112 01:08:45.115] error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I0112 01:08:45.115] has:error validating data: kind not set
I0112 01:08:45.227] test-cmd-util.sh:2447: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:08:45.406] deployment.extensions/nginx created
W0112 01:08:45.507] I0112 01:08:45.409961   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255322-7294", Name:"nginx", UID:"9bfad7c4-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1010", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-794c6b99b4 to 3
W0112 01:08:45.508] I0112 01:08:45.413480   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255322-7294", Name:"nginx-794c6b99b4", UID:"9bfb6f9c-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1011", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-794c6b99b4-82j92
W0112 01:08:45.508] I0112 01:08:45.415011   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255322-7294", Name:"nginx-794c6b99b4", UID:"9bfb6f9c-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1011", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-794c6b99b4-8jrlg
W0112 01:08:45.508] I0112 01:08:45.416374   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255322-7294", Name:"nginx-794c6b99b4", UID:"9bfb6f9c-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1011", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-794c6b99b4-k6rjn
... skipping 42 lines ...
I0112 01:08:45.893] status: {}
I0112 01:08:45.893] has:apps/v1beta1
I0112 01:08:45.991] deployment.extensions "nginx" deleted
I0112 01:08:46.125] test-cmd-util.sh:2463: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:46.343] test-cmd-util.sh:2467: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:46.345] Successful
I0112 01:08:46.346] message:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0112 01:08:46.346] has:Object 'Kind' is missing
W0112 01:08:46.446] I0112 01:08:46.271984   72784 namespace_controller.go:171] Namespace has been deleted non-native-resources
I0112 01:08:46.547] test-cmd-util.sh:2472: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:46.560] Successful
I0112 01:08:46.561] message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0112 01:08:46.561] has:busybox0:busybox1:
I0112 01:08:46.563] Successful
I0112 01:08:46.563] message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0112 01:08:46.563] has:Object 'Kind' is missing
I0112 01:08:46.676] test-cmd-util.sh:2481: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:46.784] pod/busybox0 labeled pod/busybox1 labeled error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0112 01:08:46.902] test-cmd-util.sh:2486: Successful get pods {{range.items}}{{.metadata.labels.mylabel}}:{{end}}: myvalue:myvalue:
I0112 01:08:46.905] Successful
I0112 01:08:46.905] message:pod/busybox0 labeled
I0112 01:08:46.905] pod/busybox1 labeled
I0112 01:08:46.905] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0112 01:08:46.906] has:Object 'Kind' is missing
I0112 01:08:47.016] test-cmd-util.sh:2491: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:47.123] pod/busybox0 patched pod/busybox1 patched error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0112 01:08:47.241] test-cmd-util.sh:2496: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: prom/busybox:prom/busybox:
I0112 01:08:47.244] Successful
I0112 01:08:47.245] message:pod/busybox0 patched
I0112 01:08:47.245] pod/busybox1 patched
I0112 01:08:47.245] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0112 01:08:47.246] has:Object 'Kind' is missing
I0112 01:08:47.361] test-cmd-util.sh:2501: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:47.599] test-cmd-util.sh:2505: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:08:47.602] Successful
I0112 01:08:47.603] message:warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
I0112 01:08:47.603] pod "busybox0" force deleted
I0112 01:08:47.603] pod "busybox1" force deleted
I0112 01:08:47.603] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0112 01:08:47.603] has:Object 'Kind' is missing
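Force deletion follows the same pattern on the broken tree: the decodable pods are removed immediately (hence the immediate-deletion warning) and the undecodable manifest only produces the decode error. Roughly:

    # --force with --grace-period=0 skips graceful termination.
    kubectl delete -f hack/testdata/recursive/pod --recursive --grace-period=0 --force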
I0112 01:08:47.714] test-cmd-util.sh:2510: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:08:47.885] replicationcontroller/busybox0 created
I0112 01:08:47.889] replicationcontroller/busybox1 created
W0112 01:08:47.990] I0112 01:08:47.889126   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547255322-7294", Name:"busybox0", UID:"9d751843-1606-11e9-8033-0242ac110002", APIVersion:"v1", ResourceVersion:"1041", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox0-zvxdg
W0112 01:08:47.990] error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
W0112 01:08:47.991] I0112 01:08:47.892382   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547255322-7294", Name:"busybox1", UID:"9d75dbdb-1606-11e9-8033-0242ac110002", APIVersion:"v1", ResourceVersion:"1043", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox1-lbqd6
I0112 01:08:48.091] test-cmd-util.sh:2514: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:48.140] test-cmd-util.sh:2519: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:48.262] test-cmd-util.sh:2520: Successful get rc busybox0 {{.spec.replicas}}: 1
I0112 01:08:48.387] test-cmd-util.sh:2521: Successful get rc busybox1 {{.spec.replicas}}: 1
I0112 01:08:48.608] test-cmd-util.sh:2526: Successful get hpa busybox0 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 80
I0112 01:08:48.724] test-cmd-util.sh:2527: Successful get hpa busybox1 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 80
I0112 01:08:48.727] Successful
I0112 01:08:48.728] message:horizontalpodautoscaler.autoscaling/busybox0 autoscaled
I0112 01:08:48.728] horizontalpodautoscaler.autoscaling/busybox1 autoscaled
I0112 01:08:48.728] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0112 01:08:48.728] has:Object 'Kind' is missing
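The HPA assertions above (min 1, max 2, target 80% CPU) correspond to an autoscale invocation along these lines, again recursing over the rc fixture directory:

    kubectl autoscale -f hack/testdata/recursive/rc --recursive --min=1 --max=2 --cpu-percent=80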
I0112 01:08:48.829] horizontalpodautoscaler.autoscaling "busybox0" deleted
I0112 01:08:48.947] horizontalpodautoscaler.autoscaling "busybox1" deleted
I0112 01:08:49.074] test-cmd-util.sh:2535: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:49.188] test-cmd-util.sh:2536: Successful get rc busybox0 {{.spec.replicas}}: 1
I0112 01:08:49.297] test-cmd-util.sh:2537: Successful get rc busybox1 {{.spec.replicas}}: 1
I0112 01:08:49.530] test-cmd-util.sh:2541: Successful get service busybox0 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80
I0112 01:08:49.651] test-cmd-util.sh:2542: Successful get service busybox1 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80
I0112 01:08:49.653] Successful
I0112 01:08:49.654] message:service/busybox0 exposed
I0112 01:08:49.654] service/busybox1 exposed
I0112 01:08:49.654] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0112 01:08:49.654] has:Object 'Kind' is missing
I0112 01:08:49.772] test-cmd-util.sh:2548: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:49.891] test-cmd-util.sh:2549: Successful get rc busybox0 {{.spec.replicas}}: 1
I0112 01:08:50.026] test-cmd-util.sh:2550: Successful get rc busybox1 {{.spec.replicas}}: 1
I0112 01:08:50.257] test-cmd-util.sh:2554: Successful get rc busybox0 {{.spec.replicas}}: 2
I0112 01:08:50.370] test-cmd-util.sh:2555: Successful get rc busybox1 {{.spec.replicas}}: 2
I0112 01:08:50.372] Successful
I0112 01:08:50.373] message:replicationcontroller/busybox0 scaled
I0112 01:08:50.373] replicationcontroller/busybox1 scaled
I0112 01:08:50.373] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0112 01:08:50.373] has:Object 'Kind' is missing
W0112 01:08:50.474] I0112 01:08:50.136558   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547255322-7294", Name:"busybox0", UID:"9d751843-1606-11e9-8033-0242ac110002", APIVersion:"v1", ResourceVersion:"1062", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox0-sqjkr
W0112 01:08:50.474] I0112 01:08:50.146375   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547255322-7294", Name:"busybox1", UID:"9d75dbdb-1606-11e9-8033-0242ac110002", APIVersion:"v1", ResourceVersion:"1066", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox1-jvbxr
I0112 01:08:50.575] test-cmd-util.sh:2560: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:50.697] test-cmd-util.sh:2564: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:08:50.700] Successful
I0112 01:08:50.700] message:warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
I0112 01:08:50.700] replicationcontroller "busybox0" force deleted
I0112 01:08:50.700] replicationcontroller "busybox1" force deleted
I0112 01:08:50.701] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0112 01:08:50.701] has:Object 'Kind' is missing
I0112 01:08:50.805] test-cmd-util.sh:2569: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:08:50.961] deployment.extensions/nginx1-deployment created
I0112 01:08:50.967] deployment.extensions/nginx0-deployment created
W0112 01:08:51.068] I0112 01:08:50.964878   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255322-7294", Name:"nginx1-deployment", UID:"9f4a8efc-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1082", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx1-deployment-5dc485c78 to 2
W0112 01:08:51.068] error: error validating "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
W0112 01:08:51.068] I0112 01:08:50.967667   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255322-7294", Name:"nginx1-deployment-5dc485c78", UID:"9f4b163b-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1083", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx1-deployment-5dc485c78-z9kqq
W0112 01:08:51.069] I0112 01:08:50.968344   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255322-7294", Name:"nginx0-deployment", UID:"9f4b3820-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1084", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx0-deployment-76db6cfd79 to 2
W0112 01:08:51.069] I0112 01:08:50.970374   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255322-7294", Name:"nginx1-deployment-5dc485c78", UID:"9f4b163b-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1083", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx1-deployment-5dc485c78-7p9xd
W0112 01:08:51.069] I0112 01:08:50.971248   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255322-7294", Name:"nginx0-deployment-76db6cfd79", UID:"9f4b9b34-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1087", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx0-deployment-76db6cfd79-nqnkd
W0112 01:08:51.070] I0112 01:08:50.973389   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255322-7294", Name:"nginx0-deployment-76db6cfd79", UID:"9f4b9b34-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1087", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx0-deployment-76db6cfd79-6dlr4
I0112 01:08:51.170] test-cmd-util.sh:2573: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx0-deployment:nginx1-deployment:
I0112 01:08:51.197] test-cmd-util.sh:2574: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:k8s.gcr.io/nginx:1.7.9:
I0112 01:08:51.590] test-cmd-util.sh:2578: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:k8s.gcr.io/nginx:1.7.9:
I0112 01:08:51.593] Successful
I0112 01:08:51.593] message:deployment.extensions/nginx1-deployment
I0112 01:08:51.593] deployment.extensions/nginx0-deployment
I0112 01:08:51.594] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0112 01:08:51.594] has:Object 'Kind' is missing
W0112 01:08:51.694] I0112 01:08:51.308313   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255322-7294", Name:"nginx1-deployment", UID:"9f4a8efc-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1107", FieldPath:""}): type: 'Warning' reason: 'DeploymentRollbackTemplateUnchanged' The rollback revision contains the same template as current deployment "nginx1-deployment"
W0112 01:08:51.695] I0112 01:08:51.380748   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255322-7294", Name:"nginx0-deployment", UID:"9f4b3820-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1112", FieldPath:""}): type: 'Warning' reason: 'DeploymentRollbackTemplateUnchanged' The rollback revision contains the same template as current deployment "nginx0-deployment"
I0112 01:08:51.795] deployment.extensions/nginx1-deployment paused
I0112 01:08:51.796] deployment.extensions/nginx0-deployment paused
I0112 01:08:51.836] test-cmd-util.sh:2585: Successful get deployment {{range.items}}{{.spec.paused}}:{{end}}: true:true:
I0112 01:08:51.838] Successful
I0112 01:08:51.838] message:error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0112 01:08:51.839] has:Object 'Kind' is missing
I0112 01:08:51.949] deployment.extensions/nginx1-deployment resumed
I0112 01:08:51.953] deployment.extensions/nginx0-deployment resumed
I0112 01:08:52.073] test-cmd-util.sh:2591: Successful get deployment {{range.items}}{{.spec.paused}}:{{end}}: <no value>:<no value>:
I0112 01:08:52.076] Successful
I0112 01:08:52.076] message:error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0112 01:08:52.076] has:Object 'Kind' is missing
I0112 01:08:52.209] Successful
I0112 01:08:52.209] message:deployments "nginx1-deployment"
I0112 01:08:52.209] REVISION  CHANGE-CAUSE
I0112 01:08:52.209] 1         <none>
I0112 01:08:52.210] 
I0112 01:08:52.210] deployments "nginx0-deployment"
I0112 01:08:52.210] REVISION  CHANGE-CAUSE
I0112 01:08:52.210] 1         <none>
I0112 01:08:52.210] 
I0112 01:08:52.210] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0112 01:08:52.211] has:nginx0-deployment
I0112 01:08:52.212] Successful
I0112 01:08:52.212] message:deployments "nginx1-deployment"
I0112 01:08:52.212] REVISION  CHANGE-CAUSE
I0112 01:08:52.212] 1         <none>
I0112 01:08:52.212] 
I0112 01:08:52.212] deployments "nginx0-deployment"
I0112 01:08:52.212] REVISION  CHANGE-CAUSE
I0112 01:08:52.212] 1         <none>
I0112 01:08:52.212] 
I0112 01:08:52.213] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0112 01:08:52.213] has:nginx1-deployment
I0112 01:08:52.214] Successful
I0112 01:08:52.215] message:deployments "nginx1-deployment"
I0112 01:08:52.215] REVISION  CHANGE-CAUSE
I0112 01:08:52.215] 1         <none>
I0112 01:08:52.215] 
I0112 01:08:52.215] deployments "nginx0-deployment"
I0112 01:08:52.215] REVISION  CHANGE-CAUSE
I0112 01:08:52.215] 1         <none>
I0112 01:08:52.215] 
I0112 01:08:52.216] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0112 01:08:52.216] has:Object 'Kind' is missing
I0112 01:08:52.314] deployment.extensions "nginx1-deployment" force deleted
I0112 01:08:52.318] deployment.extensions "nginx0-deployment" force deleted
W0112 01:08:52.419] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W0112 01:08:52.420] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0112 01:08:53.432] test-cmd-util.sh:2607: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:08:53.604] replicationcontroller/busybox0 created
I0112 01:08:53.609] replicationcontroller/busybox1 created
W0112 01:08:53.709] I0112 01:08:53.607460   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547255322-7294", Name:"busybox0", UID:"a0ddd1e3-1606-11e9-8033-0242ac110002", APIVersion:"v1", ResourceVersion:"1141", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox0-8g6j6
W0112 01:08:53.710] error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
W0112 01:08:53.710] I0112 01:08:53.611674   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547255322-7294", Name:"busybox1", UID:"a0de8d2c-1606-11e9-8033-0242ac110002", APIVersion:"v1", ResourceVersion:"1143", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox1-5zdlg
I0112 01:08:53.811] test-cmd-util.sh:2611: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0112 01:08:53.844] Successful
I0112 01:08:53.845] message:no rollbacker has been implemented for {"" "ReplicationController"}
I0112 01:08:53.845] no rollbacker has been implemented for {"" "ReplicationController"}
I0112 01:08:53.845] unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
... skipping 2 lines ...
I0112 01:08:53.847] message:no rollbacker has been implemented for {"" "ReplicationController"}
I0112 01:08:53.847] no rollbacker has been implemented for {"" "ReplicationController"}
I0112 01:08:53.847] unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0112 01:08:53.847] has:Object 'Kind' is missing
I0112 01:08:53.960] Successful
I0112 01:08:53.960] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0112 01:08:53.960] error: replicationcontrollers "busybox0" pausing is not supported
I0112 01:08:53.961] error: replicationcontrollers "busybox1" pausing is not supported
I0112 01:08:53.961] has:Object 'Kind' is missing
I0112 01:08:53.962] Successful
I0112 01:08:53.963] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0112 01:08:53.963] error: replicationcontrollers "busybox0" pausing is not supported
I0112 01:08:53.963] error: replicationcontrollers "busybox1" pausing is not supported
I0112 01:08:53.963] has:replicationcontrollers "busybox0" pausing is not supported
I0112 01:08:53.965] Successful
I0112 01:08:53.966] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0112 01:08:53.966] error: replicationcontrollers "busybox0" pausing is not supported
I0112 01:08:53.966] error: replicationcontrollers "busybox1" pausing is not supported
I0112 01:08:53.966] has:replicationcontrollers "busybox1" pausing is not supported
I0112 01:08:54.078] Successful
I0112 01:08:54.078] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0112 01:08:54.079] error: replicationcontrollers "busybox0" resuming is not supported
I0112 01:08:54.079] error: replicationcontrollers "busybox1" resuming is not supported
I0112 01:08:54.079] has:Object 'Kind' is missing
I0112 01:08:54.080] Successful
I0112 01:08:54.081] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0112 01:08:54.081] error: replicationcontrollers "busybox0" resuming is not supported
I0112 01:08:54.081] error: replicationcontrollers "busybox1" resuming is not supported
I0112 01:08:54.081] has:replicationcontrollers "busybox0" resuming is not supported
I0112 01:08:54.083] Successful
I0112 01:08:54.083] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0112 01:08:54.083] error: replicationcontrollers "busybox0" resuming is not supported
I0112 01:08:54.084] error: replicationcontrollers "busybox1" resuming is not supported
I0112 01:08:54.084] has:replicationcontrollers "busybox0" resuming is not supported
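rollout pause and resume are implemented per kind, and ReplicationControllers have no rollout machinery, so each decodable RC fails with "pausing/resuming is not supported" while the broken manifest fails to decode at all. The commands being exercised are presumably of this shape:

    kubectl rollout pause -f hack/testdata/recursive/rc --recursive
    kubectl rollout resume -f hack/testdata/recursive/rc --recursive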
I0112 01:08:54.177] replicationcontroller "busybox0" force deleted
I0112 01:08:54.182] replicationcontroller "busybox1" force deleted
W0112 01:08:54.283] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W0112 01:08:54.284] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0112 01:08:55.208] +++ exit code: 0
I0112 01:08:55.264] Recording: run_namespace_tests
I0112 01:08:55.265] Running command: run_namespace_tests
I0112 01:08:55.288] 
I0112 01:08:55.290] +++ Running case: test-cmd.run_namespace_tests 
I0112 01:08:55.293] +++ working dir: /go/src/k8s.io/kubernetes
... skipping 6 lines ...
W0112 01:08:58.410] I0112 01:08:58.409945   72784 controller_utils.go:1032] Caches are synced for resource quota controller
W0112 01:08:58.585] I0112 01:08:58.584633   72784 controller_utils.go:1025] Waiting for caches to sync for garbage collector controller
W0112 01:08:58.685] I0112 01:08:58.685007   72784 controller_utils.go:1032] Caches are synced for garbage collector controller
I0112 01:09:00.755] namespace/my-namespace condition met
I0112 01:09:00.871] Successful
I0112 01:09:00.871] message:No resources found.
I0112 01:09:00.871] Error from server (NotFound): namespaces "my-namespace" not found
I0112 01:09:00.872] has: not found
I0112 01:09:01.023] test-cmd-util.sh:2665: Successful get namespaces {{range.items}}{{ if eq $id_field \"other\" }}found{{end}}{{end}}:: :
I0112 01:09:01.121] namespace/other created
I0112 01:09:01.251] test-cmd-util.sh:2669: Successful get namespaces/other {{.metadata.name}}: other
I0112 01:09:01.380] test-cmd-util.sh:2673: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:09:01.578] pod/valid-pod created
I0112 01:09:01.718] test-cmd-util.sh:2677: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0112 01:09:01.853] test-cmd-util.sh:2679: Successful get pods -n other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0112 01:09:01.975] Successful
I0112 01:09:01.975] message:error: a resource cannot be retrieved by name across all namespaces
I0112 01:09:01.975] has:a resource cannot be retrieved by name across all namespaces
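Fetching by name together with --all-namespaces is rejected because a name is only unique within a single namespace; listing without a name remains valid:

    kubectl get pods valid-pod --all-namespaces   # error: a resource cannot be retrieved by name across all namespaces
    kubectl get pods --all-namespaces             # fine: a list, not a named lookup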
I0112 01:09:02.106] test-cmd-util.sh:2686: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0112 01:09:02.224] pod "valid-pod" force deleted
W0112 01:09:02.325] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
I0112 01:09:02.426] test-cmd-util.sh:2690: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:09:02.482] namespace "other" deleted
... skipping 112 lines ...
I0112 01:09:24.002] +++ command: run_client_config_tests
I0112 01:09:24.016] +++ [0112 01:09:24] Creating namespace namespace-1547255364-3017
I0112 01:09:24.104] namespace/namespace-1547255364-3017 created
I0112 01:09:24.193] Context "test" modified.
I0112 01:09:24.201] +++ [0112 01:09:24] Testing client config
I0112 01:09:24.289] Successful
I0112 01:09:24.289] message:error: stat missing: no such file or directory
I0112 01:09:24.289] has:missing: no such file or directory
I0112 01:09:24.378] Successful
I0112 01:09:24.379] message:error: stat missing: no such file or directory
I0112 01:09:24.379] has:missing: no such file or directory
I0112 01:09:24.464] Successful
I0112 01:09:24.465] message:error: stat missing: no such file or directory
I0112 01:09:24.465] has:missing: no such file or directory
I0112 01:09:24.550] Successful
I0112 01:09:24.551] message:Error in configuration: context was not found for specified context: missing-context
I0112 01:09:24.551] has:context was not found for specified context: missing-context
I0112 01:09:24.638] Successful
I0112 01:09:24.638] message:error: no server found for cluster "missing-cluster"
I0112 01:09:24.638] has:no server found for cluster "missing-cluster"
I0112 01:09:24.727] Successful
I0112 01:09:24.727] message:auth info "missing-user" does not exist
I0112 01:09:24.727] auth info "missing-user" does not exist
I0112 01:09:24.728] has:auth info "missing-user" does not exist
I0112 01:09:24.898] Successful
I0112 01:09:24.898] message:error: Error loading config file "/tmp/newconfig.yaml": no kind "Config" is registered for version "v-1"
I0112 01:09:24.898] has:Error loading config file
I0112 01:09:24.987] Successful
I0112 01:09:24.987] message:error: stat missing-config: no such file or directory
I0112 01:09:24.988] has:no such file or directory
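The client-config cases above walk through each way a kubeconfig lookup can fail, and each message maps onto a standard global flag (entry names taken from the log):

    kubectl get pods --kubeconfig=missing          # error: stat missing: no such file or directory
    kubectl get pods --context=missing-context     # context was not found for specified context
    kubectl get pods --cluster=missing-cluster     # no server found for cluster "missing-cluster"
    kubectl get pods --user=missing-user           # auth info "missing-user" does not exist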
I0112 01:09:25.004] +++ exit code: 0
I0112 01:09:25.064] Recording: run_service_accounts_tests
I0112 01:09:25.064] Running command: run_service_accounts_tests
I0112 01:09:25.089] 
I0112 01:09:25.092] +++ Running case: test-cmd.run_service_accounts_tests 
... skipping 77 lines ...
I0112 01:09:32.886]                 job-name=test-job
I0112 01:09:32.886]                 run=pi
I0112 01:09:32.886] Annotations:    cronjob.kubernetes.io/instantiate=manual
I0112 01:09:32.886] Parallelism:    1
I0112 01:09:32.887] Completions:    1
I0112 01:09:32.887] Start Time:     Sat, 12 Jan 2019 01:09:32 +0000
I0112 01:09:32.887] Pods Statuses:  1 Running / 0 Succeeded / 0 Failed
I0112 01:09:32.887] Pod Template:
I0112 01:09:32.887]   Labels:  controller-uid=b815f810-1606-11e9-8033-0242ac110002
I0112 01:09:32.887]            job-name=test-job
I0112 01:09:32.887]            run=pi
I0112 01:09:32.887]   Containers:
I0112 01:09:32.887]    pi:
... skipping 303 lines ...
I0112 01:09:42.194]   selector:
I0112 01:09:42.194]     role: padawan
I0112 01:09:42.194]   sessionAffinity: None
I0112 01:09:42.194]   type: ClusterIP
I0112 01:09:42.194] status:
I0112 01:09:42.194]   loadBalancer: {}
W0112 01:09:42.295] error: you must specify resources by --filename when --local is set.
W0112 01:09:42.295] Example resource specifications include:
W0112 01:09:42.295]    '-f rsrc.yaml'
W0112 01:09:42.296]    '--filename=rsrc.json'
I0112 01:09:42.396] test-cmd-util.sh:2890: Successful get services redis-master {{range.spec.selector}}{{.}}:{{end}}: redis:master:backend:
I0112 01:09:42.604] test-cmd-util.sh:2897: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:
I0112 01:09:42.704] service "redis-master" deleted
... skipping 40 lines ...
I0112 01:09:46.070] +++ [0112 01:09:46] Creating namespace namespace-1547255386-26828
I0112 01:09:46.158] namespace/namespace-1547255386-26828 created
I0112 01:09:46.246] Context "test" modified.
I0112 01:09:46.254] +++ [0112 01:09:46] Testing kubectl(v1:daemonsets)
I0112 01:09:46.362] test-cmd-util.sh:3650: Successful get daemonsets {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:09:46.551] daemonset.extensions/bind created
W0112 01:09:46.651] I0112 01:09:46.555210   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1305", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:46.652] I0112 01:09:46.555272   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1305", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:46.652] I0112 01:09:46.555314   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1305", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:46.653] I0112 01:09:46.559060   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1307", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:46.653] I0112 01:09:46.559113   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1307", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:46.654] I0112 01:09:46.559129   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1307", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
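Note: the repeated FailedPlacement warnings are expected in this environment; the test node registers with a pod capacity of 0, so the daemonset controller can never place the bind pod. A sketch for confirming the capacity that explains 'requested: 1, used: 0, capacity: 0':
  kubectl get node 127.0.0.1 -o jsonpath='{.status.capacity.pods}'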
I0112 01:09:46.754] test-cmd-util.sh:3654: Successful get daemonsets bind {{.spec.templateGeneration}}: 1
I0112 01:09:46.851] daemonset.extensions/bind configured
I0112 01:09:46.969] test-cmd-util.sh:3657: Successful get daemonsets bind {{.spec.templateGeneration}}: 1
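Note: these assertions read .spec.templateGeneration with a Go template; each mutation of the pod template (image, env, resources) below bumps it by one. The equivalent manual check, using the same template expression as the test:
  kubectl get daemonsets bind -o go-template='{{.spec.templateGeneration}}'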
I0112 01:09:47.077] daemonset.extensions/bind image updated
W0112 01:09:47.177] I0112 01:09:47.080635   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1314", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:47.178] I0112 01:09:47.080677   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1314", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:47.178] I0112 01:09:47.080852   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1314", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:47.179] I0112 01:09:47.084727   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1316", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:47.179] I0112 01:09:47.084774   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1316", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:47.179] I0112 01:09:47.084785   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1316", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
I0112 01:09:47.280] test-cmd-util.sh:3660: Successful get daemonsets bind {{.spec.templateGeneration}}: 2
I0112 01:09:47.316] daemonset.extensions/bind env updated
W0112 01:09:47.417] I0112 01:09:47.320916   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1323", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:47.418] I0112 01:09:47.321040   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1323", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:47.418] I0112 01:09:47.321125   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1323", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:47.418] I0112 01:09:47.325861   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1325", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:47.419] I0112 01:09:47.325904   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1325", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:47.419] I0112 01:09:47.325918   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1325", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
I0112 01:09:47.519] test-cmd-util.sh:3662: Successful get daemonsets bind {{.spec.templateGeneration}}: 3
I0112 01:09:47.539] daemonset.extensions/bind resource requirements updated
W0112 01:09:47.640] I0112 01:09:47.544469   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1333", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:47.641] I0112 01:09:47.544519   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1333", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:47.641] I0112 01:09:47.544605   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1333", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:47.642] I0112 01:09:47.549322   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1335", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:47.642] I0112 01:09:47.549415   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1335", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:47.643] I0112 01:09:47.549462   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255386-26828", Name:"bind", UID:"c06cc437-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1335", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
I0112 01:09:47.743] test-cmd-util.sh:3664: Successful get daemonsets bind {{.spec.templateGeneration}}: 4
I0112 01:09:48.764] daemonset.extensions "bind" deleted
I0112 01:09:48.789] +++ exit code: 0
I0112 01:09:48.839] Recording: run_daemonset_history_tests
I0112 01:09:48.839] Running command: run_daemonset_history_tests
I0112 01:09:48.864] 
... skipping 3 lines ...
I0112 01:09:48.887] +++ [0112 01:09:48] Creating namespace namespace-1547255388-24194
I0112 01:09:48.976] namespace/namespace-1547255388-24194 created
I0112 01:09:49.065] Context "test" modified.
I0112 01:09:49.074] +++ [0112 01:09:49] Testing kubectl(v1:daemonsets, v1:controllerrevisions)
I0112 01:09:49.183] test-cmd-util.sh:3682: Successful get daemonsets {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:09:49.356] daemonset.extensions/bind created
W0112 01:09:49.456] I0112 01:09:49.360133   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1353", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:49.457] I0112 01:09:49.360203   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1353", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:49.457] I0112 01:09:49.360261   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1353", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:49.458] I0112 01:09:49.364029   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1356", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:49.458] I0112 01:09:49.364059   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1356", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:49.458] I0112 01:09:49.364070   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1356", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
I0112 01:09:49.559] test-cmd-util.sh:3686: Successful get controllerrevisions {{range.items}}{{.metadata.annotations}}:{{end}}: map[kubernetes.io/change-cause:kubectl apply --filename=hack/testdata/rollingupdate-daemonset.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true deprecated.daemonset.template.generation:1 kubectl.kubernetes.io/last-applied-configuration:{"apiVersion":"extensions/v1beta1","kind":"DaemonSet","metadata":{"annotations":{"kubernetes.io/change-cause":"kubectl apply --filename=hack/testdata/rollingupdate-daemonset.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true"},"name":"bind","namespace":"namespace-1547255388-24194"},"spec":{"template":{"metadata":{"labels":{"service":"bind"}},"spec":{"affinity":{"podAntiAffinity":{"requiredDuringSchedulingIgnoredDuringExecution":[{"labelSelector":{"matchExpressions":[{"key":"service","operator":"In","values":["bind"]}]},"namespaces":[],"topologyKey":"kubernetes.io/hostname"}]}},"containers":[{"image":"k8s.gcr.io/pause:2.0","name":"kubernetes-pause"}]}},"updateStrategy":{"rollingUpdate":{"maxUnavailable":"10%"},"type":"RollingUpdate"}}}
I0112 01:09:49.560] ]:
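Note: the kubernetes.io/change-cause annotation shown above exists because the test applies with --record, which stores the exact command line in the annotation. A sketch of the pattern, using the manifest path from this test:
  kubectl apply -f hack/testdata/rollingupdate-daemonset.yaml --record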
I0112 01:09:49.599] daemonset.extensions/bind skipped rollback (current template already matches revision 1)
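Note: "skipped rollback" means rollout undo found the live template already equal to the target revision, so it is a no-op. A sketch of the command under test:
  kubectl rollout undo daemonset/bind --to-revision=1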
I0112 01:09:49.716] test-cmd-util.sh:3689: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
I0112 01:09:49.829] test-cmd-util.sh:3690: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I0112 01:09:50.004] daemonset.extensions/bind configured
W0112 01:09:50.105] I0112 01:09:50.008371   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1363", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:50.105] I0112 01:09:50.008429   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1363", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:50.106] I0112 01:09:50.008540   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1363", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:50.106] I0112 01:09:50.012346   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1365", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:50.106] I0112 01:09:50.012397   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1365", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:50.107] I0112 01:09:50.012407   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1365", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
I0112 01:09:50.207] test-cmd-util.sh:3693: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:latest:
I0112 01:09:50.238] test-cmd-util.sh:3694: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I0112 01:09:50.349] test-cmd-util.sh:3695: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
I0112 01:09:50.462] test-cmd-util.sh:3696: Successful get controllerrevisions {{range.items}}{{.metadata.annotations}}:{{end}}: map[kubernetes.io/change-cause:kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true deprecated.daemonset.template.generation:2 kubectl.kubernetes.io/last-applied-configuration:{"apiVersion":"extensions/v1beta1","kind":"DaemonSet","metadata":{"annotations":{"kubernetes.io/change-cause":"kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true"},"name":"bind","namespace":"namespace-1547255388-24194"},"spec":{"template":{"metadata":{"labels":{"service":"bind"}},"spec":{"affinity":{"podAntiAffinity":{"requiredDuringSchedulingIgnoredDuringExecution":[{"labelSelector":{"matchExpressions":[{"key":"service","operator":"In","values":["bind"]}]},"namespaces":[],"topologyKey":"kubernetes.io/hostname"}]}},"containers":[{"image":"k8s.gcr.io/pause:latest","name":"kubernetes-pause"},{"image":"k8s.gcr.io/nginx:test-cmd","name":"app"}]}},"updateStrategy":{"rollingUpdate":{"maxUnavailable":"10%"},"type":"RollingUpdate"}}}
I0112 01:09:50.463] ]:map[deprecated.daemonset.template.generation:1 kubectl.kubernetes.io/last-applied-configuration:{"apiVersion":"extensions/v1beta1","kind":"DaemonSet","metadata":{"annotations":{"kubernetes.io/change-cause":"kubectl apply --filename=hack/testdata/rollingupdate-daemonset.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true"},"name":"bind","namespace":"namespace-1547255388-24194"},"spec":{"template":{"metadata":{"labels":{"service":"bind"}},"spec":{"affinity":{"podAntiAffinity":{"requiredDuringSchedulingIgnoredDuringExecution":[{"labelSelector":{"matchExpressions":[{"key":"service","operator":"In","values":["bind"]}]},"namespaces":[],"topologyKey":"kubernetes.io/hostname"}]}},"containers":[{"image":"k8s.gcr.io/pause:2.0","name":"kubernetes-pause"}]}},"updateStrategy":{"rollingUpdate":{"maxUnavailable":"10%"},"type":"RollingUpdate"}}}
I0112 01:09:50.463]  kubernetes.io/change-cause:kubectl apply --filename=hack/testdata/rollingupdate-daemonset.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true]:
... skipping 9 lines ...
I0112 01:09:50.576]   Volumes:	<none>
I0112 01:09:50.576]  (dry run)
I0112 01:09:50.695] test-cmd-util.sh:3699: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:latest:
I0112 01:09:50.812] test-cmd-util.sh:3700: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I0112 01:09:50.923] test-cmd-util.sh:3701: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
I0112 01:09:51.049] daemonset.extensions/bind rolled back
W0112 01:09:51.150] I0112 01:09:51.044059   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1372", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.150] I0112 01:09:51.044105   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1372", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.151] I0112 01:09:51.044126   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1372", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.151] I0112 01:09:51.047744   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1374", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.151] I0112 01:09:51.047815   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1374", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.152] I0112 01:09:51.047829   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1374", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.152] I0112 01:09:51.048119   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1374", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.152] I0112 01:09:51.048149   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1374", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.153] I0112 01:09:51.048159   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1374", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
I0112 01:09:51.253] test-cmd-util.sh:3704: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
I0112 01:09:51.274] test-cmd-util.sh:3705: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I0112 01:09:51.393] Successful
I0112 01:09:51.394] message:error: unable to find specified revision 1000000 in history
I0112 01:09:51.394] has:unable to find specified revision
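Note: rolling back to a nonexistent revision fails cleanly, as asserted above. A sketch for listing the revisions that do exist and rolling back to one of them:
  kubectl rollout history daemonset/bind
  kubectl rollout undo daemonset/bind --to-revision=2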
I0112 01:09:51.510] test-cmd-util.sh:3709: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
I0112 01:09:51.624] test-cmd-util.sh:3710: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I0112 01:09:51.750] daemonset.extensions/bind rolled back
W0112 01:09:51.851] I0112 01:09:51.750202   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1386", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.852] I0112 01:09:51.750249   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1386", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.852] I0112 01:09:51.750263   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1386", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.852] I0112 01:09:51.754094   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1386", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.853] I0112 01:09:51.754165   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1386", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.853] I0112 01:09:51.754316   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1386", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.856] E0112 01:09:51.758895   72784 daemon_controller.go:285] namespace-1547255388-24194/bind failed with : error storing status for daemon set &v1.DaemonSet{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:v1.ObjectMeta{Name:"bind", GenerateName:"", Namespace:"namespace-1547255388-24194", SelfLink:"/apis/apps/v1/namespaces/namespace-1547255388-24194/daemonsets/bind", UID:"c218c670-1606-11e9-8033-0242ac110002", ResourceVersion:"1386", Generation:4, CreationTimestamp:v1.Time{Time:time.Time{wall:0x0, ext:63682852189, loc:(*time.Location)(0x56eb260)}}, DeletionTimestamp:(*v1.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"service":"bind"}, Annotations:map[string]string{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"extensions/v1beta1\",\"kind\":\"DaemonSet\",\"metadata\":{\"annotations\":{\"kubernetes.io/change-cause\":\"kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true\"},\"name\":\"bind\",\"namespace\":\"namespace-1547255388-24194\"},\"spec\":{\"template\":{\"metadata\":{\"labels\":{\"service\":\"bind\"}},\"spec\":{\"affinity\":{\"podAntiAffinity\":{\"requiredDuringSchedulingIgnoredDuringExecution\":[{\"labelSelector\":{\"matchExpressions\":[{\"key\":\"service\",\"operator\":\"In\",\"values\":[\"bind\"]}]},\"namespaces\":[],\"topologyKey\":\"kubernetes.io/hostname\"}]}},\"containers\":[{\"image\":\"k8s.gcr.io/pause:latest\",\"name\":\"kubernetes-pause\"},{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"app\"}]}},\"updateStrategy\":{\"rollingUpdate\":{\"maxUnavailable\":\"10%\"},\"type\":\"RollingUpdate\"}}}\n", "kubernetes.io/change-cause":"kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true", "deprecated.daemonset.template.generation":"4"}, OwnerReferences:[]v1.OwnerReference(nil), Initializers:(*v1.Initializers)(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:v1.DaemonSetSpec{Selector:(*v1.LabelSelector)(0xc422fc83a0), Template:v1.PodTemplateSpec{ObjectMeta:v1.ObjectMeta{Name:"", GenerateName:"", Namespace:"", SelfLink:"", UID:"", ResourceVersion:"", Generation:0, CreationTimestamp:v1.Time{Time:time.Time{wall:0x0, ext:0, loc:(*time.Location)(nil)}}, DeletionTimestamp:(*v1.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"service":"bind"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Initializers:(*v1.Initializers)(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:v1.PodSpec{Volumes:[]v1.Volume(nil), InitContainers:[]v1.Container(nil), Containers:[]v1.Container{v1.Container{Name:"kubernetes-pause", Image:"k8s.gcr.io/pause:latest", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]v1.ContainerPort(nil), EnvFrom:[]v1.EnvFromSource(nil), Env:[]v1.EnvVar(nil), Resources:v1.ResourceRequirements{Limits:v1.ResourceList(nil), Requests:v1.ResourceList(nil)}, VolumeMounts:[]v1.VolumeMount(nil), VolumeDevices:[]v1.VolumeDevice(nil), LivenessProbe:(*v1.Probe)(nil), ReadinessProbe:(*v1.Probe)(nil), Lifecycle:(*v1.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", TerminationMessagePolicy:"File", ImagePullPolicy:"IfNotPresent", SecurityContext:(*v1.SecurityContext)(nil), Stdin:false, StdinOnce:false, TTY:false}, v1.Container{Name:"app", Image:"k8s.gcr.io/nginx:test-cmd", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", 
Ports:[]v1.ContainerPort(nil), EnvFrom:[]v1.EnvFromSource(nil), Env:[]v1.EnvVar(nil), Resources:v1.ResourceRequirements{Limits:v1.ResourceList(nil), Requests:v1.ResourceList(nil)}, VolumeMounts:[]v1.VolumeMount(nil), VolumeDevices:[]v1.VolumeDevice(nil), LivenessProbe:(*v1.Probe)(nil), ReadinessProbe:(*v1.Probe)(nil), Lifecycle:(*v1.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", TerminationMessagePolicy:"File", ImagePullPolicy:"IfNotPresent", SecurityContext:(*v1.SecurityContext)(nil), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc4247b6e28), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"", DeprecatedServiceAccount:"", AutomountServiceAccountToken:(*bool)(nil), NodeName:"", HostNetwork:false, HostPID:false, HostIPC:false, ShareProcessNamespace:(*bool)(nil), SecurityContext:(*v1.PodSecurityContext)(0xc4245cea80), ImagePullSecrets:[]v1.LocalObjectReference(nil), Hostname:"", Subdomain:"", Affinity:(*v1.Affinity)(0xc422fc8400), SchedulerName:"default-scheduler", Tolerations:[]v1.Toleration(nil), HostAliases:[]v1.HostAlias(nil), PriorityClassName:"", Priority:(*int32)(nil), DNSConfig:(*v1.PodDNSConfig)(nil), ReadinessGates:[]v1.PodReadinessGate(nil)}}, UpdateStrategy:v1.DaemonSetUpdateStrategy{Type:"RollingUpdate", RollingUpdate:(*v1.RollingUpdateDaemonSet)(0xc42422df00)}, MinReadySeconds:0, RevisionHistoryLimit:(*int32)(0xc4247b6ea0)}, Status:v1.DaemonSetStatus{CurrentNumberScheduled:0, NumberMisscheduled:0, DesiredNumberScheduled:1, NumberReady:0, ObservedGeneration:3, UpdatedNumberScheduled:0, NumberAvailable:0, NumberUnavailable:1, CollisionCount:(*int32)(nil), Conditions:[]v1.DaemonSetCondition(nil)}}: Operation cannot be fulfilled on daemonsets.apps "bind": the object has been modified; please apply your changes to the latest version and try again
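Note: the "object has been modified" failure above is ordinary optimistic-concurrency behavior, not a test bug: the controller's status write carried a stale resourceVersion, the API server rejected it, and the controller retries. A sketch of the same precondition from the command line; any replace-style write must carry the latest value shown here:
  kubectl get ds bind -o jsonpath='{.metadata.resourceVersion}'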
W0112 01:09:51.856] I0112 01:09:51.759735   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1388", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.857] I0112 01:09:51.759845   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1388", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.857] I0112 01:09:51.759863   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1388", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.857] I0112 01:09:51.765032   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1388", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.858] I0112 01:09:51.765074   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1388", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0112 01:09:51.858] I0112 01:09:51.765087   72784 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547255388-24194", Name:"bind", UID:"c218c670-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1388", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
I0112 01:09:51.958] test-cmd-util.sh:3713: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:latest:
I0112 01:09:51.984] test-cmd-util.sh:3714: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I0112 01:09:52.097] test-cmd-util.sh:3715: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
I0112 01:09:53.208] daemonset.extensions "bind" deleted
I0112 01:09:53.232] +++ exit code: 0
I0112 01:09:53.291] Recording: run_rc_tests
... skipping 24 lines ...
I0112 01:09:54.606] Namespace:    namespace-1547255393-845
I0112 01:09:54.606] Selector:     app=guestbook,tier=frontend
I0112 01:09:54.606] Labels:       app=guestbook
I0112 01:09:54.606]               tier=frontend
I0112 01:09:54.607] Annotations:  <none>
I0112 01:09:54.607] Replicas:     3 current / 3 desired
I0112 01:09:54.607] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:09:54.607] Pod Template:
I0112 01:09:54.607]   Labels:  app=guestbook
I0112 01:09:54.607]            tier=frontend
I0112 01:09:54.607]   Containers:
I0112 01:09:54.607]    php-redis:
I0112 01:09:54.607]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
I0112 01:09:54.738] Namespace:    namespace-1547255393-845
I0112 01:09:54.738] Selector:     app=guestbook,tier=frontend
I0112 01:09:54.738] Labels:       app=guestbook
I0112 01:09:54.738]               tier=frontend
I0112 01:09:54.739] Annotations:  <none>
I0112 01:09:54.739] Replicas:     3 current / 3 desired
I0112 01:09:54.739] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:09:54.739] Pod Template:
I0112 01:09:54.739]   Labels:  app=guestbook
I0112 01:09:54.739]            tier=frontend
I0112 01:09:54.739]   Containers:
I0112 01:09:54.739]    php-redis:
I0112 01:09:54.739]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 18 lines ...
I0112 01:09:54.867] Namespace:    namespace-1547255393-845
I0112 01:09:54.867] Selector:     app=guestbook,tier=frontend
I0112 01:09:54.867] Labels:       app=guestbook
I0112 01:09:54.867]               tier=frontend
I0112 01:09:54.867] Annotations:  <none>
I0112 01:09:54.867] Replicas:     3 current / 3 desired
I0112 01:09:54.868] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:09:54.868] Pod Template:
I0112 01:09:54.868]   Labels:  app=guestbook
I0112 01:09:54.868]            tier=frontend
I0112 01:09:54.868]   Containers:
I0112 01:09:54.868]    php-redis:
I0112 01:09:54.868]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 12 lines ...
I0112 01:09:55.001] Namespace:    namespace-1547255393-845
I0112 01:09:55.001] Selector:     app=guestbook,tier=frontend
I0112 01:09:55.001] Labels:       app=guestbook
I0112 01:09:55.001]               tier=frontend
I0112 01:09:55.001] Annotations:  <none>
I0112 01:09:55.001] Replicas:     3 current / 3 desired
I0112 01:09:55.002] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:09:55.002] Pod Template:
I0112 01:09:55.002]   Labels:  app=guestbook
I0112 01:09:55.002]            tier=frontend
I0112 01:09:55.002]   Containers:
I0112 01:09:55.002]    php-redis:
I0112 01:09:55.002]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 18 lines ...
I0112 01:09:55.171] Namespace:    namespace-1547255393-845
I0112 01:09:55.171] Selector:     app=guestbook,tier=frontend
I0112 01:09:55.171] Labels:       app=guestbook
I0112 01:09:55.171]               tier=frontend
I0112 01:09:55.171] Annotations:  <none>
I0112 01:09:55.171] Replicas:     3 current / 3 desired
I0112 01:09:55.171] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:09:55.171] Pod Template:
I0112 01:09:55.171]   Labels:  app=guestbook
I0112 01:09:55.172]            tier=frontend
I0112 01:09:55.172]   Containers:
I0112 01:09:55.172]    php-redis:
I0112 01:09:55.172]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
I0112 01:09:55.301] Namespace:    namespace-1547255393-845
I0112 01:09:55.301] Selector:     app=guestbook,tier=frontend
I0112 01:09:55.301] Labels:       app=guestbook
I0112 01:09:55.301]               tier=frontend
I0112 01:09:55.301] Annotations:  <none>
I0112 01:09:55.301] Replicas:     3 current / 3 desired
I0112 01:09:55.301] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:09:55.302] Pod Template:
I0112 01:09:55.302]   Labels:  app=guestbook
I0112 01:09:55.302]            tier=frontend
I0112 01:09:55.302]   Containers:
I0112 01:09:55.302]    php-redis:
I0112 01:09:55.302]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
I0112 01:09:55.422] Namespace:    namespace-1547255393-845
I0112 01:09:55.422] Selector:     app=guestbook,tier=frontend
I0112 01:09:55.422] Labels:       app=guestbook
I0112 01:09:55.422]               tier=frontend
I0112 01:09:55.422] Annotations:  <none>
I0112 01:09:55.422] Replicas:     3 current / 3 desired
I0112 01:09:55.423] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:09:55.423] Pod Template:
I0112 01:09:55.423]   Labels:  app=guestbook
I0112 01:09:55.423]            tier=frontend
I0112 01:09:55.423]   Containers:
I0112 01:09:55.423]    php-redis:
I0112 01:09:55.423]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 11 lines ...
I0112 01:09:55.557] Namespace:    namespace-1547255393-845
I0112 01:09:55.558] Selector:     app=guestbook,tier=frontend
I0112 01:09:55.558] Labels:       app=guestbook
I0112 01:09:55.558]               tier=frontend
I0112 01:09:55.558] Annotations:  <none>
I0112 01:09:55.558] Replicas:     3 current / 3 desired
I0112 01:09:55.558] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:09:55.558] Pod Template:
I0112 01:09:55.558]   Labels:  app=guestbook
I0112 01:09:55.558]            tier=frontend
I0112 01:09:55.558]   Containers:
I0112 01:09:55.559]    php-redis:
I0112 01:09:55.559]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
W0112 01:09:55.876] I0112 01:09:55.781176   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547255393-845", Name:"frontend", UID:"c50cbd60-1606-11e9-8033-0242ac110002", APIVersion:"v1", ResourceVersion:"1430", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: frontend-xxmrn
I0112 01:09:55.977] test-cmd-util.sh:3049: Successful get rc frontend {{.spec.replicas}}: 2
I0112 01:09:56.006] test-cmd-util.sh:3053: Successful get rc frontend {{.spec.replicas}}: 2
I0112 01:09:56.227] test-cmd-util.sh:3057: Successful get rc frontend {{.spec.replicas}}: 2
I0112 01:09:56.336] test-cmd-util.sh:3061: Successful get rc frontend {{.spec.replicas}}: 2
I0112 01:09:56.448] replicationcontroller/frontend scaled
W0112 01:09:56.549] error: Expected replicas to be 3, was 2
W0112 01:09:56.550] I0112 01:09:56.452422   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547255393-845", Name:"frontend", UID:"c50cbd60-1606-11e9-8033-0242ac110002", APIVersion:"v1", ResourceVersion:"1436", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-k5lmf
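Note: "Expected replicas to be 3, was 2" is the scale precondition firing: --current-replicas guards the resize, and the failing attempt passed a count that no longer matched the live object. A sketch of the guarded form that succeeds against the state above:
  # fails unless the live spec.replicas is exactly 2
  kubectl scale rc frontend --current-replicas=2 --replicas=3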
I0112 01:09:56.650] test-cmd-util.sh:3065: Successful get rc frontend {{.spec.replicas}}: 3
I0112 01:09:56.685] test-cmd-util.sh:3069: Successful get rc frontend {{.spec.replicas}}: 3
I0112 01:09:56.798] replicationcontroller/frontend scaled
W0112 01:09:56.899] I0112 01:09:56.804664   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547255393-845", Name:"frontend", UID:"c50cbd60-1606-11e9-8033-0242ac110002", APIVersion:"v1", ResourceVersion:"1441", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: frontend-k5lmf
I0112 01:09:57.000] test-cmd-util.sh:3073: Successful get rc frontend {{.spec.replicas}}: 2
... skipping 61 lines ...
I0112 01:10:01.527] service "frontend" deleted
I0112 01:10:01.535] service "frontend-2" deleted
I0112 01:10:01.542] service "frontend-3" deleted
I0112 01:10:01.550] service "frontend-4" deleted
I0112 01:10:01.558] service "frontend-5" deleted
I0112 01:10:01.680] Successful
I0112 01:10:01.680] message:error: cannot expose a { Node}
I0112 01:10:01.680] has:cannot expose
I0112 01:10:01.797] Successful
I0112 01:10:01.797] message:The Service "invalid-large-service-name-that-has-more-than-sixty-three-characters" is invalid: metadata.name: Invalid value: "invalid-large-service-name-that-has-more-than-sixty-three-characters": must be no more than 63 characters
I0112 01:10:01.797] has:metadata.name: Invalid value
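Note: service names are DNS labels, hence the 63-character cap asserted above. A hypothetical failing example reusing the test's 69-character name:
  kubectl expose rc frontend --port=80 \
    --name=invalid-large-service-name-that-has-more-than-sixty-three-characters
  # rejected: must be no more than 63 characters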
I0112 01:10:01.922] Successful
I0112 01:10:01.923] message:service/kubernetes-serve-hostname-testing-sixty-three-characters-in-len exposed
... skipping 30 lines ...
I0112 01:10:04.331] horizontalpodautoscaler.autoscaling/frontend autoscaled
I0112 01:10:04.446] test-cmd-util.sh:3209: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 70
I0112 01:10:04.546] horizontalpodautoscaler.autoscaling "frontend" deleted
I0112 01:10:04.660] horizontalpodautoscaler.autoscaling/frontend autoscaled
I0112 01:10:04.776] test-cmd-util.sh:3213: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 2 3 80
I0112 01:10:04.874] horizontalpodautoscaler.autoscaling "frontend" deleted
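Note: the two autoscale assertions map directly onto the flags: --min/--max bound the replica range and --cpu-percent sets the target utilization, and the error block that follows shows --max is mandatory. A sketch matching the second assertion (2 3 80):
  kubectl autoscale rc frontend --min=2 --max=3 --cpu-percent=80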
W0112 01:10:04.975] Error: required flag(s) "max" not set
W0112 01:10:04.975] 
W0112 01:10:04.975] 
W0112 01:10:04.976] Examples:
W0112 01:10:04.976]   # Auto scale a deployment "foo", with the number of pods between 2 and 10, no target CPU utilization specified so a default autoscaling policy will be used:
W0112 01:10:04.976]   kubectl autoscale deployment foo --min=2 --max=10
W0112 01:10:04.976]   
... skipping 69 lines ...
I0112 01:10:05.296]       dnsPolicy: ClusterFirst
I0112 01:10:05.296]       restartPolicy: Always
I0112 01:10:05.296]       schedulerName: default-scheduler
I0112 01:10:05.296]       securityContext: {}
I0112 01:10:05.296]       terminationGracePeriodSeconds: 0
I0112 01:10:05.296] status: {}
W0112 01:10:05.397] Error from server (NotFound): deployments.extensions "nginx-deployment-resources" not found
I0112 01:10:05.575] deployment.extensions/nginx-deployment-resources created
W0112 01:10:05.676] I0112 01:10:05.578542   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255393-845", Name:"nginx-deployment-resources", UID:"cbc38938-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1680", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-resources-57c6b5597b to 3
W0112 01:10:05.676] I0112 01:10:05.581434   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255393-845", Name:"nginx-deployment-resources-57c6b5597b", UID:"cbc429a2-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1681", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-57c6b5597b-njgvt
W0112 01:10:05.677] I0112 01:10:05.583718   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255393-845", Name:"nginx-deployment-resources-57c6b5597b", UID:"cbc429a2-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1681", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-57c6b5597b-s4cj9
W0112 01:10:05.677] I0112 01:10:05.585663   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255393-845", Name:"nginx-deployment-resources-57c6b5597b", UID:"cbc429a2-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1681", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-57c6b5597b-8f6lz
I0112 01:10:05.778] test-cmd-util.sh:3228: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx-deployment-resources:
... skipping 6 lines ...
W0112 01:10:06.178] I0112 01:10:06.097583   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255393-845", Name:"nginx-deployment-resources-57c6b5597b", UID:"cbc429a2-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1700", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-resources-57c6b5597b-s4cj9
W0112 01:10:06.179] I0112 01:10:06.103612   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255393-845", Name:"nginx-deployment-resources", UID:"cbc38938-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1698", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-resources-79bfbb6584 to 2
W0112 01:10:06.179] I0112 01:10:06.125122   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255393-845", Name:"nginx-deployment-resources-79bfbb6584", UID:"cc10c4e3-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1708", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-79bfbb6584-k47fl
I0112 01:10:06.279] test-cmd-util.sh:3233: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).resources.limits.cpu}}:{{end}}: 100m:
I0112 01:10:06.365] test-cmd-util.sh:3234: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.limits.cpu}}:{{end}}: 100m:
I0112 01:10:06.590] deployment.extensions/nginx-deployment-resources resource requirements updated
W0112 01:10:06.691] error: unable to find container named redis
W0112 01:10:06.691] I0112 01:10:06.601754   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255393-845", Name:"nginx-deployment-resources", UID:"cbc38938-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1720", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-resources-57c6b5597b to 0
W0112 01:10:06.691] I0112 01:10:06.605608   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255393-845", Name:"nginx-deployment-resources-57c6b5597b", UID:"cbc429a2-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1724", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-resources-57c6b5597b-njgvt
W0112 01:10:06.692] I0112 01:10:06.606776   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255393-845", Name:"nginx-deployment-resources-57c6b5597b", UID:"cbc429a2-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1724", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-resources-57c6b5597b-8f6lz
W0112 01:10:06.692] I0112 01:10:06.608675   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255393-845", Name:"nginx-deployment-resources", UID:"cbc38938-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1723", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-resources-775fc4497d to 2
W0112 01:10:06.692] I0112 01:10:06.614296   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255393-845", Name:"nginx-deployment-resources-775fc4497d", UID:"cc5f1d1a-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1730", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-775fc4497d-8s9rz
W0112 01:10:06.693] I0112 01:10:06.619920   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255393-845", Name:"nginx-deployment-resources-775fc4497d", UID:"cc5f1d1a-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1730", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-775fc4497d-jr9rr
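Note: kubectl set resources addresses containers by name, so -c=redis fails above (no such container in this deployment) while a valid name updates the limits asserted below. A sketch, with the container name assumed from the test's multi-container nginx deployment:
  # -c must name an existing container in the pod template
  kubectl set resources deployment nginx-deployment-resources -c=nginx --limits=cpu=200m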
... skipping 83 lines ...
I0112 01:10:07.407]     status: "True"
I0112 01:10:07.407]     type: Progressing
I0112 01:10:07.407]   observedGeneration: 4
I0112 01:10:07.407]   replicas: 4
I0112 01:10:07.408]   unavailableReplicas: 4
I0112 01:10:07.408]   updatedReplicas: 2
W0112 01:10:07.508] error: you must specify resources by --filename when --local is set.
W0112 01:10:07.508] Example resource specifications include:
W0112 01:10:07.509]    '-f rsrc.yaml'
W0112 01:10:07.509]    '--filename=rsrc.json'
I0112 01:10:07.610] test-cmd-util.sh:3249: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).resources.limits.cpu}}:{{end}}: 200m:
I0112 01:10:07.724] test-cmd-util.sh:3250: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.limits.cpu}}:{{end}}: 300m:
I0112 01:10:07.835] test-cmd-util.sh:3251: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.requests.cpu}}:{{end}}: 300m:
... skipping 44 lines ...
I0112 01:10:09.617]                 pod-template-hash=1594316396
I0112 01:10:09.617] Annotations:    deployment.kubernetes.io/desired-replicas=1
I0112 01:10:09.617]                 deployment.kubernetes.io/max-replicas=2
I0112 01:10:09.617]                 deployment.kubernetes.io/revision=1
I0112 01:10:09.617] Controlled By:  Deployment/test-nginx-apps
I0112 01:10:09.617] Replicas:       1 current / 1 desired
I0112 01:10:09.617] Pods Status:    0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I0112 01:10:09.618] Pod Template:
I0112 01:10:09.618]   Labels:  app=test-nginx-apps
I0112 01:10:09.618]            pod-template-hash=1594316396
I0112 01:10:09.618]   Containers:
I0112 01:10:09.618]    nginx:
I0112 01:10:09.618]     Image:        k8s.gcr.io/nginx:test-cmd
... skipping 96 lines ...
W0112 01:10:15.735] I0112 01:10:15.569910   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255408-6410", Name:"nginx", UID:"d0473887-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1947", FieldPath:""}): type: 'Warning' reason: 'DeploymentRollbackRevisionNotFound' Unable to find the revision to rollback to.
I0112 01:10:15.836] test-cmd-util.sh:3377: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I0112 01:10:15.936] deployment.extensions/nginx
W0112 01:10:16.037] I0112 01:10:15.884429   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255408-6410", Name:"nginx", UID:"d0473887-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1952", FieldPath:""}): type: 'Normal' reason: 'DeploymentRollback' Rolled back deployment "nginx" to revision 2
I0112 01:10:17.066] test-cmd-util.sh:3381: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
I0112 01:10:17.180] deployment.extensions/nginx paused
W0112 01:10:17.300] error: you cannot rollback a paused deployment; resume it first with 'kubectl rollout resume deployment/nginx' and try again
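Note: rollback is refused while a deployment is paused, and the fix is exactly what the error suggests. A sketch:
  kubectl rollout resume deployment/nginx
  kubectl rollout undo deployment/nginx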
I0112 01:10:17.418] deployment.extensions/nginx resumed
W0112 01:10:17.540] I0112 01:10:17.539961   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255408-6410", Name:"nginx", UID:"d0473887-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1964", FieldPath:""}): type: 'Normal' reason: 'DeploymentRollback' Rolled back deployment "nginx" to revision 3
I0112 01:10:17.641] deployment.extensions/nginx
I0112 01:10:17.777]     deployment.kubernetes.io/revision-history: 1,3
W0112 01:10:17.880] error: desired revision (3) is different from the running revision (5)
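Note: "desired revision (3) is different from the running revision (5)" comes from checking rollout status against a pinned revision. A sketch of the pattern:
  # fails when the deployment's current revision is not the one requested
  kubectl rollout status deployment/nginx --revision=3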
I0112 01:10:18.048] deployment.extensions/nginx2 created
W0112 01:10:18.148] I0112 01:10:18.057465   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255408-6410", Name:"nginx2", UID:"d332d7f7-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1971", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx2-5d58d7d8d4 to 3
W0112 01:10:18.149] I0112 01:10:18.060946   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255408-6410", Name:"nginx2-5d58d7d8d4", UID:"d33451f0-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1972", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx2-5d58d7d8d4-zk7sl
W0112 01:10:18.149] I0112 01:10:18.063332   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255408-6410", Name:"nginx2-5d58d7d8d4", UID:"d33451f0-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1972", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx2-5d58d7d8d4-h5mk9
W0112 01:10:18.150] I0112 01:10:18.064147   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255408-6410", Name:"nginx2-5d58d7d8d4", UID:"d33451f0-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1972", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx2-5d58d7d8d4-ccpcc
I0112 01:10:18.250] deployment.extensions "nginx2" deleted
... skipping 9 lines ...
I0112 01:10:18.892] test-cmd-util.sh:3408: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/perl:
I0112 01:10:19.001] deployment.extensions/nginx-deployment image updated
W0112 01:10:19.102] I0112 01:10:19.004430   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment", UID:"d37f9a3b-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2018", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-78d7b4bff9 to 1
W0112 01:10:19.102] I0112 01:10:19.007833   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment-78d7b4bff9", UID:"d3c4d30f-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2019", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-78d7b4bff9-86mdg
W0112 01:10:19.102] I0112 01:10:19.012350   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment", UID:"d37f9a3b-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2018", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-84765bf7f9 to 2
W0112 01:10:19.103] I0112 01:10:19.016526   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment-84765bf7f9", UID:"d3803842-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2025", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-84765bf7f9-z8t64
W0112 01:10:19.103] E0112 01:10:19.017968   72784 replica_set.go:450] Sync "namespace-1547255408-6410/nginx-deployment-78d7b4bff9" failed with Operation cannot be fulfilled on replicasets.apps "nginx-deployment-78d7b4bff9": the object has been modified; please apply your changes to the latest version and try again
W0112 01:10:19.103] I0112 01:10:19.018314   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment", UID:"d37f9a3b-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2020", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-78d7b4bff9 to 2
W0112 01:10:19.104] I0112 01:10:19.029345   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment-78d7b4bff9", UID:"d3c4d30f-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2035", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-78d7b4bff9-2fq7r
I0112 01:10:19.204] test-cmd-util.sh:3411: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
I0112 01:10:19.257] test-cmd-util.sh:3412: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/perl:
I0112 01:10:19.485] deployment.extensions/nginx-deployment image updated
W0112 01:10:19.585] error: unable to find container named "redis"
I0112 01:10:19.686] test-cmd-util.sh:3417: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I0112 01:10:19.731] test-cmd-util.sh:3418: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/perl:
I0112 01:10:19.842] deployment.extensions/nginx-deployment image updated
I0112 01:10:19.970] test-cmd-util.sh:3421: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
I0112 01:10:20.088] test-cmd-util.sh:3422: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/perl:
I0112 01:10:20.299] test-cmd-util.sh:3425: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
... skipping 23 lines ...
I0112 01:10:22.204] test-cmd-util.sh:3447: Successful get secret {{range.items}}{{.metadata.name}}:{{end}}: test-set-env-secret:
I0112 01:10:22.325] deployment.extensions/nginx-deployment env updated
W0112 01:10:22.426] I0112 01:10:22.328904   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment", UID:"d54191c2-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2108", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-cdbc49cff to 1
W0112 01:10:22.427] I0112 01:10:22.331990   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment-cdbc49cff", UID:"d5c01e6f-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2109", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-cdbc49cff-2zlwv
W0112 01:10:22.427] I0112 01:10:22.338115   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment", UID:"d54191c2-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2108", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-84765bf7f9 to 2
W0112 01:10:22.428] I0112 01:10:22.342462   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment-84765bf7f9", UID:"d542351b-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2114", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-84765bf7f9-v77tz
W0112 01:10:22.428] E0112 01:10:22.344216   72784 replica_set.go:450] Sync "namespace-1547255408-6410/nginx-deployment-cdbc49cff" failed with Operation cannot be fulfilled on replicasets.apps "nginx-deployment-cdbc49cff": the object has been modified; please apply your changes to the latest version and try again
W0112 01:10:22.428] I0112 01:10:22.344542   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment", UID:"d54191c2-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2111", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-cdbc49cff to 2
W0112 01:10:22.429] I0112 01:10:22.347521   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment-cdbc49cff", UID:"d5c01e6f-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2119", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-cdbc49cff-t82f8
I0112 01:10:22.529] test-cmd-util.sh:3451: Successful get deploy nginx-deployment {{ (index (index .spec.template.spec.containers 0).env 0).name}}: KEY_2
I0112 01:10:22.556] test-cmd-util.sh:3453: Successful get deploy nginx-deployment {{ len (index .spec.template.spec.containers 0).env }}: 1
I0112 01:10:22.674] deployment.extensions/nginx-deployment env updated
W0112 01:10:22.775] I0112 01:10:22.685979   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment", UID:"d54191c2-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2132", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-84765bf7f9 to 0
... skipping 16 lines ...
I0112 01:10:23.268] deployment.extensions/nginx-deployment env updated
I0112 01:10:23.268] deployment.extensions/nginx-deployment env updated
I0112 01:10:23.284] deployment.extensions/nginx-deployment env updated
W0112 01:10:23.385] I0112 01:10:23.280251   72784 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment", UID:"d54191c2-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2189", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-844b494674 to 2
W0112 01:10:23.385] I0112 01:10:23.385024   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment-cdbc49cff", UID:"d5c01e6f-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2190", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-cdbc49cff-2zlwv
W0112 01:10:23.435] I0112 01:10:23.434322   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255408-6410", Name:"nginx-deployment-cdbc49cff", UID:"d5c01e6f-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2190", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-cdbc49cff-t82f8
W0112 01:10:23.531] E0112 01:10:23.531009   72784 replica_set.go:450] Sync "namespace-1547255408-6410/nginx-deployment-5fcdc7cb99" failed with replicasets.apps "nginx-deployment-5fcdc7cb99" not found
W0112 01:10:23.582] E0112 01:10:23.581380   72784 replica_set.go:450] Sync "namespace-1547255408-6410/nginx-deployment-844b494674" failed with replicasets.apps "nginx-deployment-844b494674" not found
I0112 01:10:23.682] deployment.extensions/nginx-deployment env updated
I0112 01:10:23.683] deployment.extensions "nginx-deployment" deleted
I0112 01:10:23.683] configmap "test-set-env-config" deleted
I0112 01:10:23.713] secret "test-set-env-secret" deleted
I0112 01:10:23.738] +++ exit code: 0
I0112 01:10:23.806] Recording: run_rs_tests
... skipping 3 lines ...
I0112 01:10:23.835] +++ working dir: /go/src/k8s.io/kubernetes
I0112 01:10:23.838] +++ command: run_rs_tests
I0112 01:10:23.853] +++ [0112 01:10:23] Creating namespace namespace-1547255423-28392
I0112 01:10:23.941] namespace/namespace-1547255423-28392 created
I0112 01:10:24.025] Context "test" modified.
I0112 01:10:24.033] +++ [0112 01:10:24] Testing kubectl(v1:replicasets)
W0112 01:10:24.134] E0112 01:10:23.782136   72784 replica_set.go:450] Sync "namespace-1547255408-6410/nginx-deployment-67c9c8994" failed with Operation cannot be fulfilled on replicasets.apps "nginx-deployment-67c9c8994": StorageError: invalid object, Code: 4, Key: /registry/replicasets/namespace-1547255408-6410/nginx-deployment-67c9c8994, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: d62b9993-1606-11e9-8033-0242ac110002, UID in object meta: 
W0112 01:10:24.134] E0112 01:10:23.931792   72784 replica_set.go:450] Sync "namespace-1547255408-6410/nginx-deployment-7b6cf544d6" failed with replicasets.apps "nginx-deployment-7b6cf544d6" not found
W0112 01:10:24.135] E0112 01:10:23.981735   72784 replica_set.go:450] Sync "namespace-1547255408-6410/nginx-deployment-cdbc49cff" failed with replicasets.apps "nginx-deployment-cdbc49cff" not found
W0112 01:10:24.135] E0112 01:10:24.031770   72784 replica_set.go:450] Sync "namespace-1547255408-6410/nginx-deployment-f7b94bfb8" failed with replicasets.apps "nginx-deployment-f7b94bfb8" not found
I0112 01:10:24.235] test-cmd-util.sh:3486: Successful get rs {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:10:24.309] replicaset.extensions/frontend created
I0112 01:10:24.320] +++ [0112 01:10:24] Deleting rs
I0112 01:10:24.417] replicaset.extensions "frontend" deleted
W0112 01:10:24.517] I0112 01:10:24.315100   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255423-28392", Name:"frontend", UID:"d6ee40a6-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2223", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-l4kqv
W0112 01:10:24.518] I0112 01:10:24.317810   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255423-28392", Name:"frontend", UID:"d6ee40a6-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2223", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-wcqcf
W0112 01:10:24.518] I0112 01:10:24.318145   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255423-28392", Name:"frontend", UID:"d6ee40a6-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2223", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-xhhx9
W0112 01:10:24.518] E0112 01:10:24.481716   72784 replica_set.go:450] Sync "namespace-1547255423-28392/frontend" failed with replicasets.apps "frontend" not found
I0112 01:10:24.619] test-cmd-util.sh:3492: Successful get pods -l "tier=frontend" {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:10:24.636] test-cmd-util.sh:3496: Successful get rs {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:10:24.803] replicaset.extensions/frontend created
W0112 01:10:24.904] I0112 01:10:24.806841   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255423-28392", Name:"frontend", UID:"d739ba5d-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2238", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-ks88v
W0112 01:10:24.905] I0112 01:10:24.809661   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255423-28392", Name:"frontend", UID:"d739ba5d-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2238", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-lvnsr
W0112 01:10:24.905] I0112 01:10:24.809778   72784 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547255423-28392", Name:"frontend", UID:"d739ba5d-1606-11e9-8033-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2238", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-56rjx
I0112 01:10:25.005] test-cmd-util.sh:3500: Successful get pods -l "tier=frontend" {{range.items}}{{(index .spec.containers 0).name}}:{{end}}: php-redis:php-redis:php-redis:
I0112 01:10:25.006] +++ [0112 01:10:24] Deleting rs
I0112 01:10:25.029] replicaset.extensions "frontend" deleted
W0112 01:10:25.132] E0112 01:10:25.131664   72784 replica_set.go:450] Sync "namespace-1547255423-28392/frontend" failed with replicasets.apps "frontend" not found
I0112 01:10:25.233] test-cmd-util.sh:3504: Successful get rs {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:10:25.258] test-cmd-util.sh:3506: Successful get pods -l "tier=frontend" {{range.items}}{{(index .spec.containers 0).name}}:{{end}}: php-redis:php-redis:php-redis:
I0112 01:10:25.358] pod "frontend-56rjx" deleted
I0112 01:10:25.364] pod "frontend-ks88v" deleted
I0112 01:10:25.370] pod "frontend-lvnsr" deleted
I0112 01:10:25.494] test-cmd-util.sh:3509: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
... skipping 8 lines ...
I0112 01:10:26.098] Namespace:    namespace-1547255423-28392
I0112 01:10:26.098] Selector:     app=guestbook,tier=frontend
I0112 01:10:26.098] Labels:       app=guestbook
I0112 01:10:26.098]               tier=frontend
I0112 01:10:26.098] Annotations:  <none>
I0112 01:10:26.098] Replicas:     3 current / 3 desired
I0112 01:10:26.098] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:10:26.098] Pod Template:
I0112 01:10:26.098]   Labels:  app=guestbook
I0112 01:10:26.099]            tier=frontend
I0112 01:10:26.099]   Containers:
I0112 01:10:26.099]    php-redis:
I0112 01:10:26.099]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 17 lines ...
I0112 01:10:26.231] Namespace:    namespace-1547255423-28392
I0112 01:10:26.231] Selector:     app=guestbook,tier=frontend
I0112 01:10:26.232] Labels:       app=guestbook
I0112 01:10:26.232]               tier=frontend
I0112 01:10:26.232] Annotations:  <none>
I0112 01:10:26.232] Replicas:     3 current / 3 desired
I0112 01:10:26.232] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:10:26.232] Pod Template:
I0112 01:10:26.232]   Labels:  app=guestbook
I0112 01:10:26.232]            tier=frontend
I0112 01:10:26.232]   Containers:
I0112 01:10:26.233]    php-redis:
I0112 01:10:26.233]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 18 lines ...
I0112 01:10:26.368] Namespace:    namespace-1547255423-28392
I0112 01:10:26.368] Selector:     app=guestbook,tier=frontend
I0112 01:10:26.369] Labels:       app=guestbook
I0112 01:10:26.369]               tier=frontend
I0112 01:10:26.369] Annotations:  <none>
I0112 01:10:26.369] Replicas:     3 current / 3 desired
I0112 01:10:26.369] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:10:26.369] Pod Template:
I0112 01:10:26.369]   Labels:  app=guestbook
I0112 01:10:26.369]            tier=frontend
I0112 01:10:26.369]   Containers:
I0112 01:10:26.369]    php-redis:
I0112 01:10:26.369]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 12 lines ...
I0112 01:10:26.503] Namespace:    namespace-1547255423-28392
I0112 01:10:26.503] Selector:     app=guestbook,tier=frontend
I0112 01:10:26.503] Labels:       app=guestbook
I0112 01:10:26.503]               tier=frontend
I0112 01:10:26.503] Annotations:  <none>
I0112 01:10:26.503] Replicas:     3 current / 3 desired
I0112 01:10:26.504] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:10:26.504] Pod Template:
I0112 01:10:26.504]   Labels:  app=guestbook
I0112 01:10:26.504]            tier=frontend
I0112 01:10:26.504]   Containers:
I0112 01:10:26.504]    php-redis:
I0112 01:10:26.504]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 18 lines ...
I0112 01:10:26.679] Namespace:    namespace-1547255423-28392
I0112 01:10:26.679] Selector:     app=guestbook,tier=frontend
I0112 01:10:26.679] Labels:       app=guestbook
I0112 01:10:26.679]               tier=frontend
I0112 01:10:26.680] Annotations:  <none>
I0112 01:10:26.680] Replicas:     3 current / 3 desired
I0112 01:10:26.680] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:10:26.680] Pod Template:
I0112 01:10:26.680]   Labels:  app=guestbook
I0112 01:10:26.680]            tier=frontend
I0112 01:10:26.680]   Containers:
I0112 01:10:26.681]    php-redis:
I0112 01:10:26.681]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 17 lines ...
I0112 01:10:26.810] Namespace:    namespace-1547255423-28392
I0112 01:10:26.810] Selector:     app=guestbook,tier=frontend
I0112 01:10:26.810] Labels:       app=guestbook
I0112 01:10:26.810]               tier=frontend
I0112 01:10:26.810] Annotations:  <none>
I0112 01:10:26.810] Replicas:     3 current / 3 desired
I0112 01:10:26.811] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:10:26.811] Pod Template:
I0112 01:10:26.811]   Labels:  app=guestbook
I0112 01:10:26.811]            tier=frontend
I0112 01:10:26.811]   Containers:
I0112 01:10:26.811]    php-redis:
I0112 01:10:26.811]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 17 lines ...
I0112 01:10:26.941] Namespace:    namespace-1547255423-28392
I0112 01:10:26.941] Selector:     app=guestbook,tier=frontend
I0112 01:10:26.941] Labels:       app=guestbook
I0112 01:10:26.941]               tier=frontend
I0112 01:10:26.942] Annotations:  <none>
I0112 01:10:26.942] Replicas:     3 current / 3 desired
I0112 01:10:26.942] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:10:26.942] Pod Template:
I0112 01:10:26.942]   Labels:  app=guestbook
I0112 01:10:26.942]            tier=frontend
I0112 01:10:26.942]   Containers:
I0112 01:10:26.942]    php-redis:
I0112 01:10:26.942]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 11 lines ...
I0112 01:10:27.068] Namespace:    namespace-1547255423-28392
I0112 01:10:27.068] Selector:     app=guestbook,tier=frontend
I0112 01:10:27.068] Labels:       app=guestbook
I0112 01:10:27.068]               tier=frontend
I0112 01:10:27.068] Annotations:  <none>
I0112 01:10:27.068] Replicas:     3 current / 3 desired
I0112 01:10:27.068] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0112 01:10:27.068] Pod Template:
I0112 01:10:27.068]   Labels:  app=guestbook
I0112 01:10:27.068]            tier=frontend
I0112 01:10:27.069]   Containers:
I0112 01:10:27.069]    php-redis:
I0112 01:10:27.069]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 183 lines ...
I0112 01:10:33.092] horizontalpodautoscaler.autoscaling/frontend autoscaled
I0112 01:10:33.210] test-cmd-util.sh:3625: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 70
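An hpa with exactly the spec values asserted above could be produced by an autoscale command of this shape (a sketch; the excerpt does not show which resource kind the test targets, so rs is an assumption):

  kubectl autoscale rs frontend --min=1 --max=2 --cpu-percent=70
  # yields spec.minReplicas=1, spec.maxReplicas=2, spec.targetCPUUtilizationPercentage=70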
I0112 01:10:33.309] horizontalpodautoscaler.autoscaling "frontend" deleted
I0112 01:10:33.422] horizontalpodautoscaler.autoscaling/frontend autoscaled
I0112 01:10:33.540] test-cmd-util.sh:3629: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 2 3 80
I0112 01:10:33.639] horizontalpodautoscaler.autoscaling "frontend" deleted
W0112 01:10:33.740] Error: required flag(s) "max" not set
W0112 01:10:33.740] 
W0112 01:10:33.740] 
W0112 01:10:33.740] Examples:
W0112 01:10:33.740]   # Auto scale a deployment "foo", with the number of pods between 2 and 10, no target CPU utilization specified so a default autoscaling policy will be used:
W0112 01:10:33.740]   kubectl autoscale deployment foo --min=2 --max=10
W0112 01:10:33.740]   
... skipping 89 lines ...
I0112 01:10:37.480] test-cmd-util.sh:3750: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/pause:2.0:
I0112 01:10:37.594] test-cmd-util.sh:3751: Successful get statefulset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
I0112 01:10:37.719] statefulset.apps/nginx rolled back
I0112 01:10:37.841] test-cmd-util.sh:3754: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx-slim:0.7:
I0112 01:10:37.964] test-cmd-util.sh:3755: Successful get statefulset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I0112 01:10:38.096] Successful
I0112 01:10:38.097] message:error: unable to find specified revision 1000000 in history
I0112 01:10:38.097] has:unable to find specified revision
I0112 01:10:38.226] test-cmd-util.sh:3759: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx-slim:0.7:
I0112 01:10:38.354] test-cmd-util.sh:3760: Successful get statefulset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I0112 01:10:38.477] statefulset.apps/nginx rolled back
I0112 01:10:38.607] test-cmd-util.sh:3763: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx-slim:0.8:
I0112 01:10:38.720] test-cmd-util.sh:3764: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/pause:2.0:
... skipping 58 lines ...
I0112 01:10:40.873] Name:         mock
I0112 01:10:40.873] Namespace:    namespace-1547255439-10805
I0112 01:10:40.873] Selector:     app=mock
I0112 01:10:40.873] Labels:       app=mock
I0112 01:10:40.873] Annotations:  <none>
I0112 01:10:40.873] Replicas:     1 current / 1 desired
I0112 01:10:40.873] Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I0112 01:10:40.873] Pod Template:
I0112 01:10:40.873]   Labels:  app=mock
I0112 01:10:40.873]   Containers:
I0112 01:10:40.873]    mock-container:
I0112 01:10:40.874]     Image:        k8s.gcr.io/pause:2.0
I0112 01:10:40.874]     Port:         9949/TCP
... skipping 57 lines ...
I0112 01:10:43.454] Name:         mock
I0112 01:10:43.454] Namespace:    namespace-1547255439-10805
I0112 01:10:43.454] Selector:     app=mock
I0112 01:10:43.455] Labels:       app=mock
I0112 01:10:43.455] Annotations:  <none>
I0112 01:10:43.455] Replicas:     1 current / 1 desired
I0112 01:10:43.455] Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I0112 01:10:43.455] Pod Template:
I0112 01:10:43.455]   Labels:  app=mock
I0112 01:10:43.455]   Containers:
I0112 01:10:43.456]    mock-container:
I0112 01:10:43.456]     Image:        k8s.gcr.io/pause:2.0
I0112 01:10:43.456]     Port:         9949/TCP
... skipping 56 lines ...
I0112 01:10:46.111] Name:         mock
I0112 01:10:46.112] Namespace:    namespace-1547255439-10805
I0112 01:10:46.112] Selector:     app=mock
I0112 01:10:46.112] Labels:       app=mock
I0112 01:10:46.112] Annotations:  <none>
I0112 01:10:46.112] Replicas:     1 current / 1 desired
I0112 01:10:46.112] Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I0112 01:10:46.112] Pod Template:
I0112 01:10:46.112]   Labels:  app=mock
I0112 01:10:46.112]   Containers:
I0112 01:10:46.112]    mock-container:
I0112 01:10:46.112]     Image:        k8s.gcr.io/pause:2.0
I0112 01:10:46.113]     Port:         9949/TCP
... skipping 42 lines ...
I0112 01:10:48.611] Namespace:    namespace-1547255439-10805
I0112 01:10:48.611] Selector:     app=mock
I0112 01:10:48.611] Labels:       app=mock
I0112 01:10:48.612]               status=replaced
I0112 01:10:48.612] Annotations:  <none>
I0112 01:10:48.612] Replicas:     1 current / 1 desired
I0112 01:10:48.612] Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I0112 01:10:48.612] Pod Template:
I0112 01:10:48.612]   Labels:  app=mock
I0112 01:10:48.612]   Containers:
I0112 01:10:48.612]    mock-container:
I0112 01:10:48.612]     Image:        k8s.gcr.io/pause:2.0
I0112 01:10:48.613]     Port:         9949/TCP
... skipping 11 lines ...
I0112 01:10:48.620] Namespace:    namespace-1547255439-10805
I0112 01:10:48.620] Selector:     app=mock2
I0112 01:10:48.620] Labels:       app=mock2
I0112 01:10:48.620]               status=replaced
I0112 01:10:48.620] Annotations:  <none>
I0112 01:10:48.621] Replicas:     1 current / 1 desired
I0112 01:10:48.621] Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I0112 01:10:48.621] Pod Template:
I0112 01:10:48.621]   Labels:  app=mock2
I0112 01:10:48.621]   Containers:
I0112 01:10:48.621]    mock-container:
I0112 01:10:48.621]     Image:        k8s.gcr.io/pause:2.0
I0112 01:10:48.621]     Port:         9949/TCP
... skipping 107 lines ...
I0112 01:10:54.474] +++ [0112 01:10:54] Testing persistent volumes
I0112 01:10:54.584] test-cmd-util.sh:4476: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:10:54.748] persistentvolume/pv0001 created
I0112 01:10:54.865] test-cmd-util.sh:4479: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0001:
I0112 01:10:54.962] persistentvolume "pv0001" deleted
I0112 01:10:55.128] persistentvolume/pv0002 created
W0112 01:10:55.228] E0112 01:10:55.130850   72784 pv_protection_controller.go:116] PV pv0002 failed with : Operation cannot be fulfilled on persistentvolumes "pv0002": the object has been modified; please apply your changes to the latest version and try again
I0112 01:10:55.329] test-cmd-util.sh:4482: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0002:
I0112 01:10:55.348] persistentvolume "pv0002" deleted
I0112 01:10:55.522] persistentvolume/pv0003 created
W0112 01:10:55.622] E0112 01:10:55.524724   72784 pv_protection_controller.go:116] PV pv0003 failed with : Operation cannot be fulfilled on persistentvolumes "pv0003": the object has been modified; please apply your changes to the latest version and try again
I0112 01:10:55.723] test-cmd-util.sh:4485: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0003:
I0112 01:10:55.744] persistentvolume "pv0003" deleted
I0112 01:10:55.861] test-cmd-util.sh:4488: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: 
I0112 01:10:55.878] +++ exit code: 0
I0112 01:10:55.939] Recording: run_persistent_volume_claims_tests
I0112 01:10:55.939] Running command: run_persistent_volume_claims_tests
... skipping 459 lines ...
I0112 01:11:02.171] yes
I0112 01:11:02.171] has:the server doesn't have a resource type
I0112 01:11:02.268] Successful
I0112 01:11:02.268] message:yes
I0112 01:11:02.268] has:yes
I0112 01:11:02.359] Successful
I0112 01:11:02.359] message:error: --subresource can not be used with NonResourceURL
I0112 01:11:02.360] has:subresource can not be used with NonResourceURL
I0112 01:11:02.463] Successful
I0112 01:11:02.568] Successful
I0112 01:11:02.569] message:yes
I0112 01:11:02.569] 0
I0112 01:11:02.569] has:0
... skipping 822 lines ...
I0112 01:11:33.674] message:node/127.0.0.1 already uncordoned (dry run)
I0112 01:11:33.674] has:already uncordoned
I0112 01:11:33.780] test-cmd-util.sh:4971: Successful get nodes 127.0.0.1 {{.spec.unschedulable}}: <no value>
I0112 01:11:33.877] node/127.0.0.1 labeled
I0112 01:11:33.989] test-cmd-util.sh:4976: Successful get nodes 127.0.0.1 {{.metadata.labels.test}}: label
I0112 01:11:34.074] Successful
I0112 01:11:34.075] message:error: cannot specify both a node name and a --selector option
I0112 01:11:34.075] See 'kubectl drain -h' for help and examples.
I0112 01:11:34.075] has:cannot specify both a node name
I0112 01:11:34.159] Successful
I0112 01:11:34.159] message:error: USAGE: cordon NODE [flags]
I0112 01:11:34.159] See 'kubectl cordon -h' for help and examples.
I0112 01:11:34.159] has:error\: USAGE\: cordon NODE
I0112 01:11:34.259] node/127.0.0.1 already uncordoned
I0112 01:11:34.356] Successful
I0112 01:11:34.356] message:error: You must provide one or more resources by argument or filename.
I0112 01:11:34.356] Example resource specifications include:
I0112 01:11:34.356]    '-f rsrc.yaml'
I0112 01:11:34.356]    '--filename=rsrc.json'
I0112 01:11:34.357]    '<resource> <name>'
I0112 01:11:34.357]    '<resource>'
I0112 01:11:34.357] has:must provide one or more resources
... skipping 77 lines ...
I0112 01:11:34.889]   kubectl [flags] [options]
I0112 01:11:34.889] 
I0112 01:11:34.889] Use "kubectl <command> --help" for more information about a given command.
I0112 01:11:34.889] Use "kubectl options" for a list of global command-line options (applies to all commands).
I0112 01:11:34.890] has:plugin\s\+Runs a command-line plugin
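The plugin commands exercised below go through kubectl's pre-1.12 plugin mechanism, which discovers plugins from plugin.yaml descriptors on the plugin search path. A minimal sketch of registering one such plugin (assumes the default ~/.kube/plugins location; the echoed string mirrors the test's expected output, and the command body is illustrative):

  mkdir -p ~/.kube/plugins/hello
  # plugin.yaml declares the plugin's name, short description, and the command to run
  printf 'name: hello\nshortDesc: "The hello plugin"\ncommand: "echo #hello#"\n' > ~/.kube/plugins/hello/plugin.yaml
  kubectl plugin hello   # runs the declared command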
I0112 01:11:34.968] Successful
I0112 01:11:34.969] message:error: no plugins installed.
I0112 01:11:34.969] has:no plugins installed
I0112 01:11:35.057] Successful
I0112 01:11:35.058] message:Runs a command-line plugin. 
I0112 01:11:35.058] 
I0112 01:11:35.058] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0112 01:11:35.058] 
I0112 01:11:35.058] Available Commands:
I0112 01:11:35.059]   echo        Echoes for test-cmd
I0112 01:11:35.059]   env         The plugin envs plugin
I0112 01:11:35.059]   error       The tremendous plugin that always fails!
I0112 01:11:35.059]   get         The wonderful new plugin-based get!
I0112 01:11:35.059]   tree        Plugin with a tree of commands
I0112 01:11:35.059] 
I0112 01:11:35.059] Usage:
I0112 01:11:35.060]   kubectl plugin NAME [options]
I0112 01:11:35.060] 
... skipping 5 lines ...
I0112 01:11:35.060] 
I0112 01:11:35.061] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0112 01:11:35.061] 
I0112 01:11:35.061] Available Commands:
I0112 01:11:35.061]   echo        Echoes for test-cmd
I0112 01:11:35.061]   env         The plugin envs plugin
I0112 01:11:35.061]   error       The tremendous plugin that always fails!
I0112 01:11:35.062]   get         The wonderful new plugin-based get!
I0112 01:11:35.062]   tree        Plugin with a tree of commands
I0112 01:11:35.062] 
I0112 01:11:35.062] Usage:
I0112 01:11:35.062]   kubectl plugin NAME [options]
I0112 01:11:35.062] 
... skipping 5 lines ...
I0112 01:11:35.063] 
I0112 01:11:35.063] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0112 01:11:35.064] 
I0112 01:11:35.064] Available Commands:
I0112 01:11:35.064]   echo        Echoes for test-cmd
I0112 01:11:35.064]   env         The plugin envs plugin
I0112 01:11:35.064]   error       The tremendous plugin that always fails!
I0112 01:11:35.064]   get         The wonderful new plugin-based get!
I0112 01:11:35.064]   tree        Plugin with a tree of commands
I0112 01:11:35.064] 
I0112 01:11:35.064] Usage:
I0112 01:11:35.065]   kubectl plugin NAME [options]
I0112 01:11:35.065] 
I0112 01:11:35.065] Use "kubectl <command> --help" for more information about a given command.
I0112 01:11:35.065] Use "kubectl options" for a list of global command-line options (applies to all commands).
I0112 01:11:35.065] has:error\s\+The tremendous plugin that always fails!
I0112 01:11:35.065] Successful
I0112 01:11:35.065] message:Runs a command-line plugin. 
I0112 01:11:35.066] 
I0112 01:11:35.066] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0112 01:11:35.066] 
I0112 01:11:35.066] Available Commands:
I0112 01:11:35.066]   echo        Echoes for test-cmd
I0112 01:11:35.066]   env         The plugin envs plugin
I0112 01:11:35.067]   error       The tremendous plugin that always fails!
I0112 01:11:35.067]   get         The wonderful new plugin-based get!
I0112 01:11:35.067]   tree        Plugin with a tree of commands
I0112 01:11:35.067] 
I0112 01:11:35.067] Usage:
I0112 01:11:35.067]   kubectl plugin NAME [options]
I0112 01:11:35.067] 
... skipping 5 lines ...
I0112 01:11:35.068] 
I0112 01:11:35.068] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0112 01:11:35.069] 
I0112 01:11:35.069] Available Commands:
I0112 01:11:35.069]   echo        Echoes for test-cmd
I0112 01:11:35.069]   env         The plugin envs plugin
I0112 01:11:35.069]   error       The tremendous plugin that always fails!
I0112 01:11:35.069]   get         The wonderful new plugin-based get!
I0112 01:11:35.069]   tree        Plugin with a tree of commands
I0112 01:11:35.070] 
I0112 01:11:35.070] Usage:
I0112 01:11:35.070]   kubectl plugin NAME [options]
I0112 01:11:35.070] 
... skipping 5 lines ...
I0112 01:11:35.071] 
I0112 01:11:35.071] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0112 01:11:35.071] 
I0112 01:11:35.071] Available Commands:
I0112 01:11:35.071]   echo        Echoes for test-cmd
I0112 01:11:35.071]   env         The plugin envs plugin
I0112 01:11:35.072]   error       The tremendous plugin that always fails!
I0112 01:11:35.072]   get         The wonderful new plugin-based get!
I0112 01:11:35.072]   tree        Plugin with a tree of commands
I0112 01:11:35.072] 
I0112 01:11:35.072] Usage:
I0112 01:11:35.072]   kubectl plugin NAME [options]
I0112 01:11:35.072] 
... skipping 5 lines ...
I0112 01:11:35.161] 
I0112 01:11:35.161] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0112 01:11:35.161] 
I0112 01:11:35.162] Available Commands:
I0112 01:11:35.162]   echo        Echoes for test-cmd
I0112 01:11:35.162]   env         The plugin envs plugin
I0112 01:11:35.162]   error       The tremendous plugin that always fails!
I0112 01:11:35.162]   get         The wonderful new plugin-based get!
I0112 01:11:35.163]   hello       The hello plugin
I0112 01:11:35.163]   tree        Plugin with a tree of commands
I0112 01:11:35.163] 
I0112 01:11:35.163] Usage:
I0112 01:11:35.163]   kubectl plugin NAME [options]
... skipping 6 lines ...
I0112 01:11:35.164] 
I0112 01:11:35.165] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0112 01:11:35.165] 
I0112 01:11:35.165] Available Commands:
I0112 01:11:35.165]   echo        Echoes for test-cmd
I0112 01:11:35.165]   env         The plugin envs plugin
I0112 01:11:35.165]   error       The tremendous plugin that always fails!
I0112 01:11:35.165]   get         The wonderful new plugin-based get!
I0112 01:11:35.165]   hello       The hello plugin
I0112 01:11:35.166]   tree        Plugin with a tree of commands
I0112 01:11:35.166] 
I0112 01:11:35.166] Usage:
I0112 01:11:35.166]   kubectl plugin NAME [options]
... skipping 6 lines ...
I0112 01:11:35.167] 
I0112 01:11:35.167] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0112 01:11:35.167] 
I0112 01:11:35.167] Available Commands:
I0112 01:11:35.168]   echo        Echoes for test-cmd
I0112 01:11:35.168]   env         The plugin envs plugin
I0112 01:11:35.168]   error       The tremendous plugin that always fails!
I0112 01:11:35.168]   get         The wonderful new plugin-based get!
I0112 01:11:35.168]   hello       The hello plugin
I0112 01:11:35.168]   tree        Plugin with a tree of commands
I0112 01:11:35.168] 
I0112 01:11:35.169] Usage:
I0112 01:11:35.169]   kubectl plugin NAME [options]
I0112 01:11:35.169] 
I0112 01:11:35.169] Use "kubectl <command> --help" for more information about a given command.
I0112 01:11:35.169] Use "kubectl options" for a list of global command-line options (applies to all commands).
I0112 01:11:35.169] has:error\s\+The tremendous plugin that always fails!
I0112 01:11:35.169] Successful
I0112 01:11:35.170] message:Runs a command-line plugin. 
I0112 01:11:35.170] 
I0112 01:11:35.170] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0112 01:11:35.170] 
I0112 01:11:35.170] Available Commands:
I0112 01:11:35.170]   echo        Echoes for test-cmd
I0112 01:11:35.170]   env         The plugin envs plugin
I0112 01:11:35.171]   error       The tremendous plugin that always fails!
I0112 01:11:35.171]   get         The wonderful new plugin-based get!
I0112 01:11:35.171]   hello       The hello plugin
I0112 01:11:35.171]   tree        Plugin with a tree of commands
I0112 01:11:35.171] 
I0112 01:11:35.171] Usage:
I0112 01:11:35.171]   kubectl plugin NAME [options]
... skipping 6 lines ...
I0112 01:11:35.173] 
I0112 01:11:35.173] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0112 01:11:35.173] 
I0112 01:11:35.173] Available Commands:
I0112 01:11:35.173]   echo        Echoes for test-cmd
I0112 01:11:35.173]   env         The plugin envs plugin
I0112 01:11:35.174]   error       The tremendous plugin that always fails!
I0112 01:11:35.174]   get         The wonderful new plugin-based get!
I0112 01:11:35.174]   hello       The hello plugin
I0112 01:11:35.174]   tree        Plugin with a tree of commands
I0112 01:11:35.174] 
I0112 01:11:35.174] Usage:
I0112 01:11:35.174]   kubectl plugin NAME [options]
... skipping 159 lines ...
I0112 01:11:35.450] #######
I0112 01:11:35.450] has:#hello#
I0112 01:11:35.542] Successful
I0112 01:11:35.542] message:This plugin works!
I0112 01:11:35.543] has:This plugin works!
I0112 01:11:35.629] Successful
I0112 01:11:35.629] message:error: unknown command "hello"
I0112 01:11:35.630] See 'kubectl plugin -h' for help and examples.
I0112 01:11:35.630] has:unknown command
I0112 01:11:35.718] Successful
I0112 01:11:35.719] message:error: exit status 1
I0112 01:11:35.719] has:error: exit status 1
I0112 01:11:35.811] Successful
I0112 01:11:35.811] message:Plugin with a tree of commands
I0112 01:11:35.811] 
I0112 01:11:35.811] Available Commands:
I0112 01:11:35.811]   child1      The first child of a tree
I0112 01:11:35.811]   child2      The second child of a tree
... skipping 467 lines ...
I0112 01:11:36.288] 
I0112 01:11:36.291] +++ Running case: test-cmd.run_impersonation_tests 
I0112 01:11:36.293] +++ working dir: /go/src/k8s.io/kubernetes
I0112 01:11:36.296] +++ command: run_impersonation_tests
I0112 01:11:36.307] +++ [0112 01:11:36] Testing impersonation
I0112 01:11:36.394] Successful
I0112 01:11:36.394] message:error: requesting groups or user-extra for  without impersonating a user
I0112 01:11:36.394] has:without impersonating a user
I0112 01:11:36.573] certificatesigningrequest.certificates.k8s.io/foo created
I0112 01:11:36.689] test-cmd-util.sh:5101: Successful get csr/foo {{.spec.username}}: user1
I0112 01:11:36.798] test-cmd-util.sh:5102: Successful get csr/foo {{range .spec.groups}}{{.}}{{end}}: system:authenticated
I0112 01:11:36.909] certificatesigningrequest.certificates.k8s.io "foo" deleted
I0112 01:11:37.097] certificatesigningrequest.certificates.k8s.io/foo created
... skipping 12 lines ...
W0112 01:11:37.731] I0112 01:11:37.730358   68585 crd_finalizer.go:254] Shutting down CRDFinalizer
W0112 01:11:37.731] I0112 01:11:37.730054   68585 serve.go:156] Stopped listening on 127.0.0.1:6443
W0112 01:11:37.731] I0112 01:11:37.730440   68585 customresource_discovery_controller.go:210] Shutting down DiscoveryController
W0112 01:11:37.732] I0112 01:11:37.730195   68585 serve.go:156] Stopped listening on 127.0.0.1:8080
W0112 01:11:37.732] I0112 01:11:37.730069   68585 controller.go:90] Shutting down OpenAPI AggregationController
W0112 01:11:37.732] I0112 01:11:37.731233   68585 controller.go:158] Shutting down kubernetes service endpoint reconciler
W0112 01:11:37.735] E0112 01:11:37.734833   68585 controller.go:160] rpc error: code = Unavailable desc = transport is closing
I0112 01:11:37.836] No resources found
I0112 01:11:37.836] pod "test-pod-1" force deleted
I0112 01:11:37.836] +++ [0112 01:11:37] TESTS PASSED
I0112 01:11:37.836] junit report dir: /workspace/artifacts
I0112 01:11:37.837] +++ [0112 01:11:37] Clean up complete
W0112 01:11:37.938] + make test-integration
... skipping 170 lines ...
I0112 01:21:08.025] ok  	k8s.io/kubernetes/test/integration/replicationcontroller	58.181s
I0112 01:21:08.025] [restful] 2019/01/12 01:15:35 log.go:33: [restful/swagger] listing is available at https://172.17.0.2:37743/swaggerapi
I0112 01:21:08.025] [restful] 2019/01/12 01:15:35 log.go:33: [restful/swagger] https://172.17.0.2:37743/swaggerui/ is mapped to folder /swagger-ui/
I0112 01:21:08.026] [restful] 2019/01/12 01:15:37 log.go:33: [restful/swagger] listing is available at https://172.17.0.2:37743/swaggerapi
I0112 01:21:08.026] [restful] 2019/01/12 01:15:37 log.go:33: [restful/swagger] https://172.17.0.2:37743/swaggerui/ is mapped to folder /swagger-ui/
I0112 01:21:08.026] ok  	k8s.io/kubernetes/test/integration/scale	13.718s
I0112 01:21:08.026] FAIL	k8s.io/kubernetes/test/integration/scheduler	272.122s
I0112 01:21:08.026] ok  	k8s.io/kubernetes/test/integration/scheduler_perf	1.269s
I0112 01:21:08.026] ok  	k8s.io/kubernetes/test/integration/secrets	5.039s
I0112 01:21:08.026] ok  	k8s.io/kubernetes/test/integration/serviceaccount	47.171s
I0112 01:21:08.026] ok  	k8s.io/kubernetes/test/integration/statefulset	12.413s
I0112 01:21:08.026] ok  	k8s.io/kubernetes/test/integration/storageclasses	5.147s
I0112 01:21:08.027] [restful] 2019/01/12 01:17:07 log.go:33: [restful/swagger] listing is available at https://172.17.0.2:38001/swaggerapi
... skipping 2 lines ...
I0112 01:21:08.027] [restful] 2019/01/12 01:17:09 log.go:33: [restful/swagger] https://172.17.0.2:38001/swaggerui/ is mapped to folder /swagger-ui/
I0112 01:21:08.027] ok  	k8s.io/kubernetes/test/integration/tls	14.428s
I0112 01:21:08.027] ok  	k8s.io/kubernetes/test/integration/ttlcontroller	11.715s
I0112 01:21:08.027] ok  	k8s.io/kubernetes/test/integration/volume	89.082s
I0112 01:21:08.101] ok  	k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration	108.464s
I0112 01:21:08.767] +++ [0112 01:21:08] Saved JUnit XML test report to /workspace/artifacts/junit_cae8d27844a37937152775ec7fb068d1755ac188_20190112-011151.xml
I0112 01:21:08.771] Makefile:184: recipe for target 'test' failed
I0112 01:21:08.783] +++ [0112 01:21:08] Cleaning up etcd
W0112 01:21:08.884] make[1]: *** [test] Error 1
W0112 01:21:08.884] !!! [0112 01:21:08] Call tree:
W0112 01:21:08.885] !!! [0112 01:21:08]  1: hack/make-rules/test-integration.sh:105 runTests(...)
I0112 01:21:09.049] +++ [0112 01:21:09] Integration test cleanup complete
I0112 01:21:09.050] Makefile:203: recipe for target 'test-integration' failed
W0112 01:21:09.150] make: *** [test-integration] Error 1
W0112 01:21:10.861] Traceback (most recent call last):
W0112 01:21:10.861]   File "/workspace/./test-infra/jenkins/../scenarios/kubernetes_verify.py", line 178, in <module>
W0112 01:21:10.862]     ARGS.exclude_typecheck, ARGS.exclude_godep)
W0112 01:21:10.862]   File "/workspace/./test-infra/jenkins/../scenarios/kubernetes_verify.py", line 140, in main
W0112 01:21:10.862]     check(*cmd)
W0112 01:21:10.862]   File "/workspace/./test-infra/jenkins/../scenarios/kubernetes_verify.py", line 48, in check
W0112 01:21:10.862]     subprocess.check_call(cmd)
W0112 01:21:10.862]   File "/usr/lib/python2.7/subprocess.py", line 186, in check_call
W0112 01:21:10.862]     raise CalledProcessError(retcode, cmd)
W0112 01:21:10.863] subprocess.CalledProcessError: Command '('docker', 'run', '--rm=true', '--privileged=true', '-v', '/var/run/docker.sock:/var/run/docker.sock', '-v', '/etc/localtime:/etc/localtime:ro', '-v', '/workspace/k8s.io/kubernetes:/go/src/k8s.io/kubernetes', '-v', '/workspace/k8s.io/:/workspace/k8s.io/', '-v', '/workspace/_artifacts:/workspace/artifacts', '-e', 'KUBE_FORCE_VERIFY_CHECKS=n', '-e', 'KUBE_VERIFY_GIT_BRANCH=release-1.11', '-e', 'EXCLUDE_TYPECHECK=n', '-e', 'EXCLUDE_GODEP=n', '-e', 'REPO_DIR=/workspace/k8s.io/kubernetes', '--tmpfs', '/tmp:exec,mode=1777', 'gcr.io/k8s-testimages/kubekins-test:1.11-v20181218-db74ab3f4', 'bash', '-c', 'cd kubernetes && ./hack/jenkins/test-dockerized.sh')' returned non-zero exit status 2
E0112 01:21:10.871] Command failed
I0112 01:21:10.871] process 686 exited with code 1 after 24.4m
E0112 01:21:10.871] FAIL: pull-kubernetes-integration
I0112 01:21:10.872] Call:  gcloud auth activate-service-account --key-file=/etc/service-account/service-account.json
W0112 01:21:11.468] Activated service account credentials for: [pr-kubekins@kubernetes-jenkins-pull.iam.gserviceaccount.com]
I0112 01:21:11.528] process 123609 exited with code 0 after 0.0m
I0112 01:21:11.528] Call:  gcloud config get-value account
I0112 01:21:11.908] process 123621 exited with code 0 after 0.0m
I0112 01:21:11.909] Will upload results to gs://kubernetes-jenkins/pr-logs using pr-kubekins@kubernetes-jenkins-pull.iam.gserviceaccount.com
I0112 01:21:11.909] Upload result and artifacts...
I0112 01:21:11.909] Gubernator results at https://gubernator.k8s.io/build/kubernetes-jenkins/pr-logs/pull/72601/pull-kubernetes-integration/41102
I0112 01:21:11.910] Call:  gsutil ls gs://kubernetes-jenkins/pr-logs/pull/72601/pull-kubernetes-integration/41102/artifacts
W0112 01:21:13.248] CommandException: One or more URLs matched no objects.
E0112 01:21:13.423] Command failed
I0112 01:21:13.423] process 123633 exited with code 1 after 0.0m
W0112 01:21:13.423] Remote dir gs://kubernetes-jenkins/pr-logs/pull/72601/pull-kubernetes-integration/41102/artifacts not exist yet
I0112 01:21:13.424] Call:  gsutil -m -q -o GSUtil:use_magicfile=True cp -r -c -z log,txt,xml /workspace/_artifacts gs://kubernetes-jenkins/pr-logs/pull/72601/pull-kubernetes-integration/41102/artifacts
I0112 01:21:15.729] process 123775 exited with code 0 after 0.0m
W0112 01:21:15.729] metadata path /workspace/_artifacts/metadata.json does not exist
W0112 01:21:15.730] metadata not found or invalid, init with empty metadata
... skipping 23 lines ...