PR:           msau42: Automated cherry pick of #71804: Move unmount volume util from pkg/volume/util to
Result:       FAILURE
Tests:        1 failed / 333 succeeded
Started:      2019-01-11 23:34
Elapsed:      21m48s
Revision:
Builder:      gke-prow-containerd-pool-99179761-9sg5
Refs:         release-1.11:c6e60c04, 72601:42a7310d
pod:          63c583b2-15f9-11e9-b9b3-0a580a6c0361
infra-commit: 2a90eab87
repo:         k8s.io/kubernetes
repo-commit:  136dfd2ea969ed318ac7c2566a72519f6bc37c0f
repos:        {u'k8s.io/kubernetes': u'release-1.11:c6e60c047d0313bfc1e95efd9c6b989dcad05cd7,72601:42a7310d52857be6608ab99c30d3d8f4d421909a'}

Test Failures


k8s.io/kubernetes/test/integration/scheduler TestPreemptionRaces 10s

go test -v k8s.io/kubernetes/test/integration/scheduler -run TestPreemptionRaces$
I0111 23:52:20.224378  122348 services.go:33] Network range for service cluster IPs is unspecified. Defaulting to {10.0.0.0 ffffff00}.
I0111 23:52:20.224418  122348 master.go:278] Node port range unspecified. Defaulting to 30000-32767.
I0111 23:52:20.224431  122348 master.go:234] Using reconciler: 
W0111 23:52:20.310009  122348 genericapiserver.go:319] Skipping API batch/v2alpha1 because it has no resources.
W0111 23:52:20.324361  122348 genericapiserver.go:319] Skipping API rbac.authorization.k8s.io/v1alpha1 because it has no resources.
W0111 23:52:20.325202  122348 genericapiserver.go:319] Skipping API scheduling.k8s.io/v1alpha1 because it has no resources.
W0111 23:52:20.326913  122348 genericapiserver.go:319] Skipping API storage.k8s.io/v1alpha1 because it has no resources.
W0111 23:52:20.384445  122348 genericapiserver.go:319] Skipping API admissionregistration.k8s.io/v1alpha1 because it has no resources.
I0111 23:52:21.392023  122348 storage_scheduling.go:91] created PriorityClass system-node-critical with value 2000001000
I0111 23:52:21.394232  122348 storage_scheduling.go:91] created PriorityClass system-cluster-critical with value 2000000000
I0111 23:52:21.394257  122348 storage_scheduling.go:100] all system priority classes are created successfully or already exist.
I0111 23:52:21.400041  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/cluster-admin
I0111 23:52:21.402715  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:discovery
I0111 23:52:21.405282  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:basic-user
I0111 23:52:21.408522  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/admin
I0111 23:52:21.410889  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/edit
I0111 23:52:21.413662  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/view
I0111 23:52:21.417268  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:aggregate-to-admin
I0111 23:52:21.420405  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:aggregate-to-edit
I0111 23:52:21.423428  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:aggregate-to-view
I0111 23:52:21.426194  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:heapster
I0111 23:52:21.444063  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:node
I0111 23:52:21.447790  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:node-problem-detector
I0111 23:52:21.450986  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:node-proxier
I0111 23:52:21.454283  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:kubelet-api-admin
I0111 23:52:21.456653  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:node-bootstrapper
I0111 23:52:21.459234  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:auth-delegator
I0111 23:52:21.461781  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:kube-aggregator
I0111 23:52:21.464664  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:kube-controller-manager
I0111 23:52:21.467478  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:kube-scheduler
I0111 23:52:21.470207  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:kube-dns
I0111 23:52:21.473028  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:persistent-volume-provisioner
I0111 23:52:21.475673  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:csi-external-provisioner
I0111 23:52:21.478475  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:csi-external-attacher
I0111 23:52:21.481010  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:aws-cloud-provider
I0111 23:52:21.483716  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:certificates.k8s.io:certificatesigningrequests:nodeclient
I0111 23:52:21.486341  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient
I0111 23:52:21.488848  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:volume-scheduler
I0111 23:52:21.491471  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:attachdetach-controller
I0111 23:52:21.494244  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:clusterrole-aggregation-controller
I0111 23:52:21.496980  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:cronjob-controller
I0111 23:52:21.499745  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:daemon-set-controller
I0111 23:52:21.502339  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:deployment-controller
I0111 23:52:21.505788  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:disruption-controller
I0111 23:52:21.508522  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:endpoint-controller
I0111 23:52:21.513056  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:expand-controller
I0111 23:52:21.515959  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:generic-garbage-collector
I0111 23:52:21.518860  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:horizontal-pod-autoscaler
I0111 23:52:21.521468  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:job-controller
I0111 23:52:21.524239  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:namespace-controller
I0111 23:52:21.526768  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:node-controller
I0111 23:52:21.529541  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:persistent-volume-binder
I0111 23:52:21.532180  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:pod-garbage-collector
I0111 23:52:21.534777  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:replicaset-controller
I0111 23:52:21.537430  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:replication-controller
I0111 23:52:21.540170  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:resourcequota-controller
I0111 23:52:21.542931  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:route-controller
I0111 23:52:21.545657  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:service-account-controller
I0111 23:52:21.548691  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:service-controller
I0111 23:52:21.551556  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:statefulset-controller
I0111 23:52:21.554621  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:ttl-controller
I0111 23:52:21.591308  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:certificate-controller
I0111 23:52:21.631738  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:pvc-protection-controller
E0111 23:52:21.665996  122348 controller.go:192] unable to sync kubernetes service: Post http://127.0.0.1:43587/api/v1/namespaces: dial tcp 127.0.0.1:43587: connect: connection refused
I0111 23:52:21.671361  122348 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:pv-protection-controller
I0111 23:52:21.711982  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/cluster-admin
I0111 23:52:21.751234  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:discovery
I0111 23:52:21.791284  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:basic-user
I0111 23:52:21.831191  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:node-proxier
I0111 23:52:21.871201  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:kube-controller-manager
I0111 23:52:21.911552  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:kube-dns
I0111 23:52:21.951423  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:kube-scheduler
I0111 23:52:21.991366  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:aws-cloud-provider
I0111 23:52:22.031107  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:node
I0111 23:52:22.071072  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:volume-scheduler
I0111 23:52:22.111327  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:attachdetach-controller
I0111 23:52:22.151554  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:clusterrole-aggregation-controller
I0111 23:52:22.191530  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:cronjob-controller
I0111 23:52:22.231206  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:daemon-set-controller
I0111 23:52:22.271465  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:deployment-controller
I0111 23:52:22.326594  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:disruption-controller
I0111 23:52:22.350979  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:endpoint-controller
I0111 23:52:22.391391  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:expand-controller
I0111 23:52:22.431121  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:generic-garbage-collector
I0111 23:52:22.471627  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:horizontal-pod-autoscaler
I0111 23:52:22.512662  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:job-controller
I0111 23:52:22.551972  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:namespace-controller
I0111 23:52:22.592591  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:node-controller
I0111 23:52:22.631787  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:persistent-volume-binder
I0111 23:52:22.671702  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:pod-garbage-collector
I0111 23:52:22.711342  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:replicaset-controller
I0111 23:52:22.751467  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:replication-controller
I0111 23:52:22.791582  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:resourcequota-controller
I0111 23:52:22.834111  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:route-controller
I0111 23:52:22.871321  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:service-account-controller
I0111 23:52:22.911432  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:service-controller
I0111 23:52:22.951032  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:statefulset-controller
I0111 23:52:22.991284  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:ttl-controller
I0111 23:52:23.031254  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:certificate-controller
I0111 23:52:23.071470  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:pvc-protection-controller
I0111 23:52:23.111124  122348 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:pv-protection-controller
I0111 23:52:23.151229  122348 storage_rbac.go:246] created role.rbac.authorization.k8s.io/extension-apiserver-authentication-reader in kube-system
I0111 23:52:23.191320  122348 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-system
I0111 23:52:23.231296  122348 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system:controller:cloud-provider in kube-system
I0111 23:52:23.274713  122348 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system:controller:token-cleaner in kube-system
I0111 23:52:23.312221  122348 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system::leader-locking-kube-controller-manager in kube-system
I0111 23:52:23.351286  122348 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system::leader-locking-kube-scheduler in kube-system
I0111 23:52:23.392392  122348 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-public
I0111 23:52:23.431351  122348 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-public
I0111 23:52:23.471207  122348 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system::leader-locking-kube-controller-manager in kube-system
I0111 23:52:23.512723  122348 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system::leader-locking-kube-scheduler in kube-system
I0111 23:52:23.551085  122348 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-system
I0111 23:52:23.591182  122348 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system:controller:cloud-provider in kube-system
I0111 23:52:23.631224  122348 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system:controller:token-cleaner in kube-system
W0111 23:52:23.691258  122348 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0111 23:52:23.691564  122348 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0111 23:52:23.691649  122348 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0111 23:52:23.691696  122348 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0111 23:52:23.691733  122348 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0111 23:52:23.691756  122348 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0111 23:52:23.691778  122348 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0111 23:52:23.691817  122348 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0111 23:52:23.691851  122348 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
W0111 23:52:23.691903  122348 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
I0111 23:52:23.692389  122348 factory.go:1205] Created equivalence class cache
I0111 23:52:23.692438  122348 controller_utils.go:1025] Waiting for caches to sync for scheduler controller
I0111 23:52:23.792581  122348 controller_utils.go:1032] Caches are synced for scheduler controller
E0111 23:52:24.516178  122348 controller.go:192] unable to sync kubernetes service: Post http://127.0.0.1:33389/api/v1/namespaces: dial tcp 127.0.0.1:33389: connect: connection refused
E0111 23:52:24.777432  122348 controller.go:192] unable to sync kubernetes service: Post http://127.0.0.1:34767/api/v1/namespaces: dial tcp 127.0.0.1:34767: connect: connection refused
E0111 23:52:24.826476  122348 controller.go:192] unable to sync kubernetes service: Post http://127.0.0.1:43559/api/v1/namespaces: dial tcp 127.0.0.1:43559: connect: connection refused
I0111 23:52:25.008091  122348 preemption_test.go:560] Creating the preemptor pod...
I0111 23:52:25.010529  122348 preemption_test.go:566] Creating additional pods...
I0111 23:52:25.468862  122348 preemption_test.go:582] Check unschedulable pods still exists and were never scheduled...
I0111 23:52:25.600482  122348 preemption_test.go:597] Cleaning up all pods...
I0111 23:52:26.249721  122348 preemption_test.go:560] Creating the preemptor pod...
I0111 23:52:26.251728  122348 preemption_test.go:566] Creating additional pods...
E0111 23:52:26.320986  122348 controller.go:192] unable to sync kubernetes service: Post http://127.0.0.1:38355/api/v1/namespaces: dial tcp 127.0.0.1:38355: connect: connection refused
I0111 23:52:26.606629  122348 preemption_test.go:582] Check unschedulable pods still exists and were never scheduled...
I0111 23:52:26.768670  122348 preemption_test.go:597] Cleaning up all pods...
I0111 23:52:27.436506  122348 preemption_test.go:560] Creating the preemptor pod...
I0111 23:52:27.439430  122348 preemption_test.go:566] Creating additional pods...
I0111 23:52:27.808003  122348 preemption_test.go:582] Check unschedulable pods still exists and were never scheduled...
I0111 23:52:27.933939  122348 preemption_test.go:597] Cleaning up all pods...
W0111 23:52:28.011823  122348 factory.go:1493] A pod preemption-racef110f5b4-15fb-11e9-84e6-0242ac110002/ppod-8 no longer exists
E0111 23:52:28.016552  122348 scheduler.go:223] Error getting the updated preemptor pod object: pods "ppod-8" not found
E0111 23:52:28.030653  122348 scheduler.go:223] Error getting the updated preemptor pod object: pods "ppod-11" not found
W0111 23:52:28.035522  122348 factory.go:1493] A pod preemption-racef110f5b4-15fb-11e9-84e6-0242ac110002/ppod-12 no longer exists
E0111 23:52:28.036983  122348 scheduler.go:223] Error getting the updated preemptor pod object: pods "ppod-12" not found
I0111 23:52:28.636729  122348 preemption_test.go:560] Creating the preemptor pod...
I0111 23:52:28.639811  122348 preemption_test.go:566] Creating additional pods...
I0111 23:52:29.224910  122348 preemption_test.go:582] Check unschedulable pods still exists and were never scheduled...
E0111 23:52:29.276697  122348 controller.go:192] unable to sync kubernetes service: Post http://127.0.0.1:42789/api/v1/namespaces: dial tcp 127.0.0.1:42789: connect: connection refused
I0111 23:52:29.333683  122348 preemption_test.go:597] Cleaning up all pods...
I0111 23:52:30.007559  122348 preemption_test.go:560] Creating the preemptor pod...
I0111 23:52:30.010329  122348 preemption_test.go:566] Creating additional pods...
I0111 23:52:30.348972  122348 preemption_test.go:582] Check unschedulable pods still exists and were never scheduled...
I0111 23:52:30.449568  122348 preemption_test.go:597] Cleaning up all pods...
I0111 23:52:31.089093  122348 preemption_test.go:560] Creating the preemptor pod...
I0111 23:52:31.091261  122348 preemption_test.go:566] Creating additional pods...
preemption_test.go:570: Test [ensures that other pods are not scheduled while preemptor is being marked as nominated (issue #72124)]: Error creating pending pod: 0-length response
				from junit_cae8d27844a37937152775ec7fb068d1755ac188_20190111-234821.xml
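The race is intermittent, so reproducing it locally usually means stress-running the single test rather than the whole suite. A minimal sketch, assuming a checkout matching the Refs above on the GOPATH and a local etcd (which the scheduler integration tests require):

  # Hypothetical stress loop: rerun the failing test until it trips.
  # Assumes etcd is on PATH and the tree builds as in this job (go1.10.7).
  for i in $(seq 1 20); do
    go test -v k8s.io/kubernetes/test/integration/scheduler \
      -run 'TestPreemptionRaces$' || break
  done

The trailing $ in the -run pattern (as in the command above) anchors the regex so go test does not also match longer test names that share the prefix.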



(333 passed tests and 4 skipped tests are not shown.)

Error lines from build-log.txt

... skipping 10 lines ...
I0111 23:34:47.902] process 215 exited with code 0 after 0.0m
I0111 23:34:47.903] Call:  gcloud config get-value account
I0111 23:34:48.195] process 227 exited with code 0 after 0.0m
I0111 23:34:48.196] Will upload results to gs://kubernetes-jenkins/pr-logs using pr-kubekins@kubernetes-jenkins-pull.iam.gserviceaccount.com
I0111 23:34:48.196] Call:  kubectl get -oyaml pods/63c583b2-15f9-11e9-b9b3-0a580a6c0361
W0111 23:34:48.288] The connection to the server localhost:8080 was refused - did you specify the right host or port?
E0111 23:34:48.290] Command failed
I0111 23:34:48.290] process 239 exited with code 1 after 0.0m
E0111 23:34:48.290] unable to upload podspecs: Command '['kubectl', 'get', '-oyaml', 'pods/63c583b2-15f9-11e9-b9b3-0a580a6c0361']' returned non-zero exit status 1
I0111 23:34:48.291] Root: /workspace
I0111 23:34:48.291] cd to /workspace
I0111 23:34:48.291] Checkout: /workspace/k8s.io/kubernetes release-1.11:c6e60c047d0313bfc1e95efd9c6b989dcad05cd7,72601:42a7310d52857be6608ab99c30d3d8f4d421909a to /workspace/k8s.io/kubernetes
I0111 23:34:48.291] Call:  git init k8s.io/kubernetes
... skipping 504 lines ...
W0111 23:43:02.493] I0111 23:43:02.492843   73116 controller_utils.go:1025] Waiting for caches to sync for PV protection controller
W0111 23:43:02.493] I0111 23:43:02.492997   73116 replica_set.go:182] Starting replicationcontroller controller
W0111 23:43:02.493] I0111 23:43:02.493045   73116 controller_utils.go:1025] Waiting for caches to sync for ReplicationController controller
W0111 23:43:02.493] I0111 23:43:02.493183   73116 controllermanager.go:479] Started "csrapproving"
W0111 23:43:02.494] I0111 23:43:02.493296   73116 certificate_controller.go:113] Starting certificate controller
W0111 23:43:02.494] I0111 23:43:02.493340   73116 controller_utils.go:1025] Waiting for caches to sync for certificate controller
W0111 23:43:02.494] E0111 23:43:02.493713   73116 core.go:72] Failed to start service controller: WARNING: no cloud provider provided, services of type LoadBalancer will fail
W0111 23:43:02.494] W0111 23:43:02.493732   73116 controllermanager.go:476] Skipping "service"
W0111 23:43:02.499] I0111 23:43:02.499331   73116 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for {extensions deployments}
W0111 23:43:02.499] I0111 23:43:02.499401   73116 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for {apps controllerrevisions}
W0111 23:43:02.500] I0111 23:43:02.499428   73116 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for {extensions replicasets}
W0111 23:43:02.500] I0111 23:43:02.499614   73116 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for {policy poddisruptionbudgets}
W0111 23:43:02.500] I0111 23:43:02.499661   73116 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for {rbac.authorization.k8s.io rolebindings}
... skipping 46 lines ...
W0111 23:43:02.508] I0111 23:43:02.508460   73116 controllermanager.go:479] Started "persistentvolume-binder"
W0111 23:43:02.509] I0111 23:43:02.508726   73116 pv_controller_base.go:271] Starting persistent volume controller
W0111 23:43:02.509] I0111 23:43:02.508757   73116 controller_utils.go:1025] Waiting for caches to sync for persistent volume controller
W0111 23:43:02.515] I0111 23:43:02.515295   73116 controllermanager.go:479] Started "namespace"
W0111 23:43:02.515] I0111 23:43:02.515434   73116 namespace_controller.go:186] Starting namespace controller
W0111 23:43:02.516] I0111 23:43:02.515457   73116 controller_utils.go:1025] Waiting for caches to sync for namespace controller
W0111 23:43:02.516] W0111 23:43:02.515562   73116 garbagecollector.go:649] failed to discover preferred resources: the cache has not been filled yet
W0111 23:43:02.516] I0111 23:43:02.515853   73116 controllermanager.go:479] Started "garbagecollector"
W0111 23:43:02.516] I0111 23:43:02.515917   73116 garbagecollector.go:133] Starting garbage collector controller
W0111 23:43:02.516] I0111 23:43:02.515939   73116 controller_utils.go:1025] Waiting for caches to sync for garbage collector controller
W0111 23:43:02.516] I0111 23:43:02.515956   73116 graph_builder.go:308] GraphBuilder running
W0111 23:43:02.516] I0111 23:43:02.516120   73116 controllermanager.go:479] Started "cronjob"
W0111 23:43:02.516] I0111 23:43:02.516415   73116 controllermanager.go:479] Started "pvc-protection"
... skipping 42 lines ...
W0111 23:43:02.622] I0111 23:43:02.621936   73116 controller_utils.go:1032] Caches are synced for TTL controller
I0111 23:43:02.722] +++ [0111 23:43:02] On try 2, controller-manager: ok
I0111 23:43:02.745] node/127.0.0.1 created
I0111 23:43:02.757] +++ [0111 23:43:02] Checking kubectl version
I0111 23:43:02.839] Client Version: version.Info{Major:"1", Minor:"11+", GitVersion:"v1.11.7-beta.0.42+136dfd2ea969ed", GitCommit:"136dfd2ea969ed318ac7c2566a72519f6bc37c0f", GitTreeState:"clean", BuildDate:"2019-01-11T23:40:41Z", GoVersion:"go1.10.7", Compiler:"gc", Platform:"linux/amd64"}
I0111 23:43:02.839] Server Version: version.Info{Major:"1", Minor:"11+", GitVersion:"v1.11.7-beta.0.42+136dfd2ea969ed", GitCommit:"136dfd2ea969ed318ac7c2566a72519f6bc37c0f", GitTreeState:"clean", BuildDate:"2019-01-11T23:41:04Z", GoVersion:"go1.10.7", Compiler:"gc", Platform:"linux/amd64"}
W0111 23:43:02.940] W0111 23:43:02.746927   73116 actual_state_of_world.go:491] Failed to update statusUpdateNeeded field in actual state of world: Failed to set statusUpdateNeeded to needed true, because nodeName="127.0.0.1" does not exist
W0111 23:43:02.940] I0111 23:43:02.801160   73116 controller_utils.go:1032] Caches are synced for resource quota controller
W0111 23:43:02.940] I0111 23:43:02.802989   73116 controller_utils.go:1032] Caches are synced for deployment controller
W0111 23:43:02.940] I0111 23:43:02.821625   73116 controller_utils.go:1032] Caches are synced for ReplicaSet controller
W0111 23:43:03.320] The Service "kubernetes" is invalid: spec.clusterIP: Invalid value: "10.0.0.1": provided IP is already allocated
I0111 23:43:03.462] NAME         TYPE        CLUSTER-IP   EXTERNAL-IP   PORT(S)   AGE
I0111 23:43:03.463] kubernetes   ClusterIP   10.0.0.1     <none>        443/TCP   41s
... skipping 83 lines ...
I0111 23:43:07.693] +++ [0111 23:43:07] Creating namespace namespace-1547250187-25644
I0111 23:43:07.770] namespace/namespace-1547250187-25644 created
I0111 23:43:07.847] Context "test" modified.
I0111 23:43:07.853] +++ [0111 23:43:07] Testing RESTMapper
W0111 23:43:07.954] I0111 23:43:07.607177   73116 node_lifecycle_controller.go:1095] Initializing eviction metric for zone: 
W0111 23:43:07.954] I0111 23:43:07.607258   73116 node_lifecycle_controller.go:945] Controller detected that all Nodes are not-Ready. Entering master disruption mode.
I0111 23:43:08.055] +++ [0111 23:43:07] "kubectl get unknownresourcetype" returns error as expected: error: the server doesn't have a resource type "unknownresourcetype"
I0111 23:43:08.055] +++ exit code: 0
I0111 23:43:08.119] NAME                              SHORTNAMES   APIGROUP                       NAMESPACED   KIND
I0111 23:43:08.119] bindings                                                                      true         Binding
I0111 23:43:08.119] componentstatuses                 cs                                          false        ComponentStatus
I0111 23:43:08.119] configmaps                        cm                                          true         ConfigMap
I0111 23:43:08.120] endpoints                         ep                                          true         Endpoints
... skipping 593 lines ...
I0111 23:43:27.705] test-cmd-util.sh:477: Successful get secrets --namespace=test-kubectl-describe-pod {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:43:27.786] secret/test-secret created
I0111 23:43:27.887] test-cmd-util.sh:481: Successful get secret/test-secret --namespace=test-kubectl-describe-pod {{.metadata.name}}: test-secret
I0111 23:43:27.988] test-cmd-util.sh:482: Successful get secret/test-secret --namespace=test-kubectl-describe-pod {{.type}}: test-type
I0111 23:43:28.092] test-cmd-util.sh:486: Successful get configmaps --namespace=test-kubectl-describe-pod {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:43:28.174] configmap/test-configmap created
W0111 23:43:28.275] error: resource(s) were provided, but no name, label selector, or --all flag specified
W0111 23:43:28.275] error: setting 'all' parameter but found a non empty selector. 
W0111 23:43:28.275] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W0111 23:43:28.360] I0111 23:43:28.359723   68899 controller.go:597] quota admission added evaluator for: {policy poddisruptionbudgets}
I0111 23:43:28.461] test-cmd-util.sh:490: Successful get configmap/test-configmap --namespace=test-kubectl-describe-pod {{.metadata.name}}: test-configmap
I0111 23:43:28.461] poddisruptionbudget.policy/test-pdb-1 created
I0111 23:43:28.466] test-cmd-util.sh:496: Successful get pdb/test-pdb-1 --namespace=test-kubectl-describe-pod {{.spec.minAvailable}}: 2
I0111 23:43:28.545] poddisruptionbudget.policy/test-pdb-2 created
I0111 23:43:28.647] test-cmd-util.sh:500: Successful get pdb/test-pdb-2 --namespace=test-kubectl-describe-pod {{.spec.minAvailable}}: 50%
I0111 23:43:28.733] poddisruptionbudget.policy/test-pdb-3 created
I0111 23:43:28.836] test-cmd-util.sh:506: Successful get pdb/test-pdb-3 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 2
I0111 23:43:28.913] poddisruptionbudget.policy/test-pdb-4 created
I0111 23:43:29.014] test-cmd-util.sh:510: Successful get pdb/test-pdb-4 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 50%
I0111 23:43:29.189] test-cmd-util.sh:516: Successful get pods --namespace=test-kubectl-describe-pod {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:43:29.361] pod/env-test-pod created
W0111 23:43:29.462] error: min-available and max-unavailable cannot be both specified
I0111 23:43:29.562] test-cmd-util.sh:519: Successful describe pods --namespace=test-kubectl-describe-pod env-test-pod:
I0111 23:43:29.562] Name:               env-test-pod
I0111 23:43:29.563] Namespace:          test-kubectl-describe-pod
I0111 23:43:29.563] Priority:           0
I0111 23:43:29.563] PriorityClassName:  <none>
I0111 23:43:29.563] Node:               <none>
... skipping 161 lines ...
I0111 23:43:42.698] pod/valid-pod patched
I0111 23:43:42.787] test-cmd-util.sh:721: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: changed-with-yaml:
I0111 23:43:42.859] pod/valid-pod patched
I0111 23:43:42.949] test-cmd-util.sh:726: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:3.1:
I0111 23:43:43.103] pod/valid-pod patched
I0111 23:43:43.192] test-cmd-util.sh:742: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: nginx:
I0111 23:43:43.356] +++ [0111 23:43:43] "kubectl patch with resourceVersion 484" returns error as expected: Error from server (Conflict): Operation cannot be fulfilled on pods "valid-pod": the object has been modified; please apply your changes to the latest version and try again
I0111 23:43:43.571] pod "valid-pod" deleted
I0111 23:43:43.579] pod/valid-pod replaced
I0111 23:43:43.676] test-cmd-util.sh:766: Successful get pod valid-pod {{(index .spec.containers 0).name}}: replaced-k8s-serve-hostname
I0111 23:43:43.825] Successful
I0111 23:43:43.825] message:error: --grace-period must have --force specified
I0111 23:43:43.825] has:\-\-grace-period must have \-\-force specified
I0111 23:43:43.958] Successful
I0111 23:43:43.958] message:error: --timeout must have --force specified
I0111 23:43:43.958] has:\-\-timeout must have \-\-force specified
I0111 23:43:44.092] node/node-v1-test created
W0111 23:43:44.193] W0111 23:43:44.092072   73116 actual_state_of_world.go:491] Failed to update statusUpdateNeeded field in actual state of world: Failed to set statusUpdateNeeded to needed true, because nodeName="node-v1-test" does not exist
I0111 23:43:44.293] node/node-v1-test replaced
I0111 23:43:44.319] test-cmd-util.sh:803: Successful get node node-v1-test {{.metadata.annotations.a}}: b
I0111 23:43:44.397] node "node-v1-test" deleted
I0111 23:43:44.487] test-cmd-util.sh:810: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: nginx:
I0111 23:43:44.726] test-cmd-util.sh:813: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: k8s.gcr.io/serve_hostname:
I0111 23:43:45.597] test-cmd-util.sh:826: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
... skipping 17 lines ...
I0111 23:43:45.893]     name: kubernetes-pause
I0111 23:43:45.893] has:localonlyvalue
I0111 23:43:45.911] test-cmd-util.sh:836: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
I0111 23:43:46.108] test-cmd-util.sh:840: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
I0111 23:43:46.196] test-cmd-util.sh:844: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod
I0111 23:43:46.272] pod/valid-pod labeled
W0111 23:43:46.373] error: 'name' already has a value (valid-pod), and --overwrite is false
I0111 23:43:46.473] test-cmd-util.sh:848: Successful get pod valid-pod {{.metadata.labels.name}}: valid-pod-super-sayan
I0111 23:43:46.474] test-cmd-util.sh:852: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0111 23:43:46.548] pod "valid-pod" force deleted
I0111 23:43:46.640] test-cmd-util.sh:856: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:43:46.645] +++ [0111 23:43:46] Creating namespace namespace-1547250226-19263
I0111 23:43:46.718] namespace/namespace-1547250226-19263 created
... skipping 81 lines ...
I0111 23:43:53.191] +++ Running case: test-cmd.run_kubectl_create_error_tests 
I0111 23:43:53.193] +++ working dir: /go/src/k8s.io/kubernetes
I0111 23:43:53.195] +++ command: run_kubectl_create_error_tests
I0111 23:43:53.204] +++ [0111 23:43:53] Creating namespace namespace-1547250233-3604
I0111 23:43:53.273] namespace/namespace-1547250233-3604 created
I0111 23:43:53.346] Context "test" modified.
I0111 23:43:53.351] +++ [0111 23:43:53] Testing kubectl create with error
W0111 23:43:53.452] Error: required flag(s) "filename" not set
W0111 23:43:53.452] 
W0111 23:43:53.452] 
W0111 23:43:53.452] Examples:
W0111 23:43:53.452]   # Create a pod using the data in pod.json.
W0111 23:43:53.453]   kubectl create -f ./pod.json
W0111 23:43:53.453]   
... skipping 38 lines ...
W0111 23:43:53.460]   kubectl create -f FILENAME [options]
W0111 23:43:53.460] 
W0111 23:43:53.461] Use "kubectl <command> --help" for more information about a given command.
W0111 23:43:53.461] Use "kubectl options" for a list of global command-line options (applies to all commands).
W0111 23:43:53.461] 
W0111 23:43:53.461] required flag(s) "filename" not set
I0111 23:43:53.571] +++ [0111 23:43:53] "kubectl create with empty string list returns error as expected: error: error validating "hack/testdata/invalid-rc-with-empty-args.yaml": error validating data: ValidationError(ReplicationController.spec.template.spec.containers[0].args): unknown object type "nil" in ReplicationController.spec.template.spec.containers[0].args[0]; if you choose to ignore these errors, turn validation off with --validate=false
I0111 23:43:53.742] +++ exit code: 0
I0111 23:43:53.773] Recording: run_kubectl_apply_tests
I0111 23:43:53.773] Running command: run_kubectl_apply_tests
I0111 23:43:53.788] 
I0111 23:43:53.790] +++ Running case: test-cmd.run_kubectl_apply_tests 
I0111 23:43:53.791] +++ working dir: /go/src/k8s.io/kubernetes
... skipping 12 lines ...
I0111 23:43:55.609] deployment.extensions "test-deployment-retainkeys" deleted
I0111 23:43:55.697] test-cmd-util.sh:995: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:43:55.832] pod/selector-test-pod created
I0111 23:43:55.923] test-cmd-util.sh:999: Successful get pods selector-test-pod {{.metadata.labels.name}}: selector-test-pod
I0111 23:43:56.002] Successful
I0111 23:43:56.002] message:No resources found.
I0111 23:43:56.002] Error from server (NotFound): pods "selector-test-pod-dont-apply" not found
I0111 23:43:56.002] has:pods "selector-test-pod-dont-apply" not found
I0111 23:43:56.076] pod "selector-test-pod" deleted
I0111 23:43:56.165] test-cmd-util.sh:1009: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:43:56.298] pod/a created
W0111 23:43:56.399] I0111 23:43:54.653713   68899 controller.go:597] quota admission added evaluator for: {extensions deployments}
W0111 23:43:56.399] I0111 23:43:54.671812   68899 controller.go:597] quota admission added evaluator for: {apps replicasets}
... skipping 3 lines ...
W0111 23:43:56.400] I0111 23:43:55.214525   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250233-3091", Name:"test-deployment-retainkeys-7fb69956c", UID:"c1aad42a-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"487", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: test-deployment-retainkeys-7fb69956c-m4pxm
W0111 23:43:56.400] I0111 23:43:55.224114   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250233-3091", Name:"test-deployment-retainkeys", UID:"c1a81328-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"494", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set test-deployment-retainkeys-5f667997fd to 1
W0111 23:43:56.401] I0111 23:43:55.226744   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250233-3091", Name:"test-deployment-retainkeys-5f667997fd", UID:"c1fee0b2-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"496", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test-deployment-retainkeys-5f667997fd-x4nsx
I0111 23:43:57.803] test-cmd-util.sh:1014: Successful get pods a {{.metadata.name}}: a
I0111 23:43:57.893] Successful
I0111 23:43:57.893] message:No resources found.
I0111 23:43:57.893] Error from server (NotFound): pods "b" not found
I0111 23:43:57.893] has:pods "b" not found
I0111 23:43:58.044] pod/b created
I0111 23:43:58.056] pod/a pruned
I0111 23:43:59.751] test-cmd-util.sh:1022: Successful get pods b {{.metadata.name}}: b
I0111 23:43:59.837] Successful
I0111 23:43:59.838] message:No resources found.
I0111 23:43:59.838] Error from server (NotFound): pods "a" not found
I0111 23:43:59.838] has:pods "a" not found
I0111 23:43:59.919] pod "b" deleted
I0111 23:44:00.017] test-cmd-util.sh:1032: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:44:00.163] pod/a created
I0111 23:44:00.261] test-cmd-util.sh:1037: Successful get pods a {{.metadata.name}}: a
I0111 23:44:00.353] Successful
I0111 23:44:00.354] message:No resources found.
I0111 23:44:00.354] Error from server (NotFound): pods "b" not found
I0111 23:44:00.354] has:pods "b" not found
I0111 23:44:00.513] pod/b created
I0111 23:44:00.610] test-cmd-util.sh:1045: Successful get pods a {{.metadata.name}}: a
I0111 23:44:00.704] test-cmd-util.sh:1046: Successful get pods b {{.metadata.name}}: b
I0111 23:44:00.788] pod "a" deleted
I0111 23:44:00.791] pod "b" deleted
I0111 23:44:00.942] Successful
I0111 23:44:00.943] message:error: all resources selected for prune without explicitly passing --all. To prune all resources, pass the --all flag. If you did not mean to prune all resources, specify a label selector.
I0111 23:44:00.943] has:all resources selected for prune without explicitly passing --all
I0111 23:44:01.090] pod/a created
I0111 23:44:01.095] pod/b created
I0111 23:44:01.102] service/prune-svc created
I0111 23:44:02.610] test-cmd-util.sh:1058: Successful get pods a {{.metadata.name}}: a
I0111 23:44:02.704] test-cmd-util.sh:1059: Successful get pods b {{.metadata.name}}: b
... skipping 125 lines ...
I0111 23:44:13.658] test-cmd-util.sh:1101: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:44:13.817] pod/selector-test-pod created
W0111 23:44:13.917] I0111 23:44:12.817354   68899 controller.go:597] quota admission added evaluator for: {batch cronjobs}
I0111 23:44:14.018] test-cmd-util.sh:1105: Successful get pods selector-test-pod {{.metadata.labels.name}}: selector-test-pod
I0111 23:44:14.018] Successful
I0111 23:44:14.018] message:No resources found.
I0111 23:44:14.019] Error from server (NotFound): pods "selector-test-pod-dont-apply" not found
I0111 23:44:14.019] has:pods "selector-test-pod-dont-apply" not found
I0111 23:44:14.105] pod "selector-test-pod" deleted
I0111 23:44:14.125] +++ exit code: 0
I0111 23:44:14.163] Recording: run_kubectl_apply_deployments_tests
I0111 23:44:14.163] Running command: run_kubectl_apply_deployments_tests
I0111 23:44:14.182] 
... skipping 37 lines ...
W0111 23:44:16.762] I0111 23:44:16.663536   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250254-11634", Name:"nginx", UID:"cec5d4b5-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"663", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-74d9fbb98 to 3
W0111 23:44:16.762] I0111 23:44:16.665898   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250254-11634", Name:"nginx-74d9fbb98", UID:"cec64718-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"665", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-74d9fbb98-ljc8h
W0111 23:44:16.763] I0111 23:44:16.668155   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250254-11634", Name:"nginx-74d9fbb98", UID:"cec64718-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"665", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-74d9fbb98-qstlh
W0111 23:44:16.763] I0111 23:44:16.668431   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250254-11634", Name:"nginx-74d9fbb98", UID:"cec64718-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"665", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-74d9fbb98-zg7c7
I0111 23:44:16.864] test-cmd-util.sh:1160: Successful get deployment nginx {{.metadata.name}}: nginx
I0111 23:44:20.968] Successful
I0111 23:44:20.968] message:Error from server (Conflict): error when applying patch:
I0111 23:44:20.968] {"metadata":{"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"extensions/v1beta1\",\"kind\":\"Deployment\",\"metadata\":{\"annotations\":{},\"labels\":{\"name\":\"nginx\"},\"name\":\"nginx\",\"namespace\":\"namespace-1547250254-11634\",\"resourceVersion\":\"99\"},\"spec\":{\"replicas\":3,\"selector\":{\"matchLabels\":{\"name\":\"nginx2\"}},\"template\":{\"metadata\":{\"labels\":{\"name\":\"nginx2\"}},\"spec\":{\"containers\":[{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"nginx\",\"ports\":[{\"containerPort\":80}]}]}}}}\n"},"resourceVersion":"99"},"spec":{"selector":{"matchLabels":{"name":"nginx2"}},"template":{"metadata":{"labels":{"name":"nginx2"}}}}}
I0111 23:44:20.968] to:
I0111 23:44:20.969] Resource: "extensions/v1beta1, Resource=deployments", GroupVersionKind: "extensions/v1beta1, Kind=Deployment"
I0111 23:44:20.969] Name: "nginx", Namespace: "namespace-1547250254-11634"
I0111 23:44:20.970] Object: &{map["apiVersion":"extensions/v1beta1" "metadata":map["name":"nginx" "namespace":"namespace-1547250254-11634" "selfLink":"/apis/extensions/v1beta1/namespaces/namespace-1547250254-11634/deployments/nginx" "annotations":map["deployment.kubernetes.io/revision":"1" "kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"extensions/v1beta1\",\"kind\":\"Deployment\",\"metadata\":{\"annotations\":{},\"labels\":{\"name\":\"nginx\"},\"name\":\"nginx\",\"namespace\":\"namespace-1547250254-11634\"},\"spec\":{\"replicas\":3,\"template\":{\"metadata\":{\"labels\":{\"name\":\"nginx1\"}},\"spec\":{\"containers\":[{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"nginx\",\"ports\":[{\"containerPort\":80}]}]}}}}\n"] "uid":"cec5d4b5-15fa-11e9-befc-0242ac110002" "resourceVersion":"677" "generation":'\x01' "creationTimestamp":"2019-01-11T23:44:16Z" "labels":map["name":"nginx"]] "spec":map["replicas":'\x03' "selector":map["matchLabels":map["name":"nginx1"]] "template":map["metadata":map["creationTimestamp":<nil> "labels":map["name":"nginx1"]] "spec":map["securityContext":map[] "schedulerName":"default-scheduler" "containers":[map["name":"nginx" "image":"k8s.gcr.io/nginx:test-cmd" "ports":[map["containerPort":'P' "protocol":"TCP"]] "resources":map[] "terminationMessagePath":"/dev/termination-log" "terminationMessagePolicy":"File" "imagePullPolicy":"IfNotPresent"]] "restartPolicy":"Always" "terminationGracePeriodSeconds":'\x1e' "dnsPolicy":"ClusterFirst"]] "strategy":map["type":"RollingUpdate" "rollingUpdate":map["maxUnavailable":'\x01' "maxSurge":'\x01']] "revisionHistoryLimit":'\n' "progressDeadlineSeconds":'\u0258'] "status":map["replicas":'\x03' "updatedReplicas":'\x03' "unavailableReplicas":'\x03' "conditions":[map["type":"Available" "status":"False" "lastUpdateTime":"2019-01-11T23:44:16Z" "lastTransitionTime":"2019-01-11T23:44:16Z" "reason":"MinimumReplicasUnavailable" "message":"Deployment does not have minimum availability."] map["type":"Progressing" "status":"True" "lastUpdateTime":"2019-01-11T23:44:16Z" "lastTransitionTime":"2019-01-11T23:44:16Z" "reason":"ReplicaSetUpdated" "message":"ReplicaSet \"nginx-74d9fbb98\" is progressing."]] "observedGeneration":'\x01'] "kind":"Deployment"]}
I0111 23:44:20.970] for: "hack/testdata/deployment-label-change2.yaml": Operation cannot be fulfilled on deployments.extensions "nginx": the object has been modified; please apply your changes to the latest version and try again
I0111 23:44:20.970] has:Error from server (Conflict)
W0111 23:44:22.644] I0111 23:44:22.644228   73116 horizontal.go:366] Horizontal Pod Autoscaler has been deleted namespace-1547250230-12679/frontend
W0111 23:44:25.176] E0111 23:44:25.176009   73116 replica_set.go:450] Sync "namespace-1547250254-11634/nginx-74d9fbb98" failed with Operation cannot be fulfilled on replicasets.apps "nginx-74d9fbb98": StorageError: invalid object, Code: 4, Key: /registry/replicasets/namespace-1547250254-11634/nginx-74d9fbb98, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: cec64718-15fa-11e9-befc-0242ac110002, UID in object meta: 
I0111 23:44:26.162] deployment.extensions/nginx configured
W0111 23:44:26.262] I0111 23:44:26.165604   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250254-11634", Name:"nginx", UID:"d46f85b0-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"698", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-d7576cc9 to 3
W0111 23:44:26.263] I0111 23:44:26.168600   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250254-11634", Name:"nginx-d7576cc9", UID:"d4702644-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"699", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-d7576cc9-phlzs
W0111 23:44:26.263] I0111 23:44:26.170935   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250254-11634", Name:"nginx-d7576cc9", UID:"d4702644-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"699", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-d7576cc9-cxmzk
W0111 23:44:26.263] I0111 23:44:26.171606   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250254-11634", Name:"nginx-d7576cc9", UID:"d4702644-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"699", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-d7576cc9-gx7g4
I0111 23:44:26.364] Successful
... skipping 147 lines ...
I0111 23:44:33.482] namespace/namespace-1547250273-13325 created
I0111 23:44:33.559] Context "test" modified.
I0111 23:44:33.566] +++ [0111 23:44:33] Testing kubectl get
I0111 23:44:33.666] test-cmd-util.sh:1502: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:44:33.758] Successful
I0111 23:44:33.758] message:No resources found.
I0111 23:44:33.758] Error from server (NotFound): pods "abc" not found
I0111 23:44:33.758] has:pods "abc" not found
I0111 23:44:33.855] test-cmd-util.sh:1510: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:44:33.950] Successful
I0111 23:44:33.951] message:Error from server (NotFound): pods "abc" not found
I0111 23:44:33.951] has:pods "abc" not found
I0111 23:44:34.047] test-cmd-util.sh:1518: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:44:34.140] Successful
I0111 23:44:34.141] message:{
I0111 23:44:34.141]     "apiVersion": "v1",
I0111 23:44:34.141]     "items": [],
... skipping 33 lines ...
I0111 23:44:34.882] has not:No resources found
I0111 23:44:34.973] Successful
I0111 23:44:34.973] message:No resources found.
I0111 23:44:34.974] has:No resources found
I0111 23:44:35.076] test-cmd-util.sh:1562: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:44:35.172] Successful
I0111 23:44:35.172] message:Error from server (NotFound): pods "abc" not found
I0111 23:44:35.172] has:pods "abc" not found
I0111 23:44:35.174] FAIL!
I0111 23:44:35.174] message:Error from server (NotFound): pods "abc" not found
I0111 23:44:35.175] has not:List
I0111 23:44:35.175] 1568 /go/src/k8s.io/kubernetes/hack/make-rules/test-cmd-util.sh
I0111 23:44:35.293] Successful
I0111 23:44:35.294] message:I0111 23:44:35.243921   85102 loader.go:359] Config loaded from file /tmp/tmp.G2JqX65BPv/.kube/config
I0111 23:44:35.294] I0111 23:44:35.244403   85102 loader.go:359] Config loaded from file /tmp/tmp.G2JqX65BPv/.kube/config
I0111 23:44:35.294] I0111 23:44:35.245757   85102 round_trippers.go:405] GET http://127.0.0.1:8080/version?timeout=32s 200 OK in 1 milliseconds
... skipping 992 lines ...
I0111 23:44:38.677] }
I0111 23:44:38.773] test-cmd-util.sh:1621: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0111 23:44:39.037] <no value>Successful
I0111 23:44:39.038] message:valid-pod:
I0111 23:44:39.038] has:valid-pod:
I0111 23:44:39.126] Successful
I0111 23:44:39.126] message:Error executing template: missing is not found. Printing more information for debugging the template:
I0111 23:44:39.126] 	template was:
I0111 23:44:39.126] 		{.missing}
I0111 23:44:39.126] 	object given to jsonpath engine was:
I0111 23:44:39.127] 		map[string]interface {}{"apiVersion":"v1", "metadata":map[string]interface {}{"name":"valid-pod", "namespace":"namespace-1547250278-17535", "selfLink":"/api/v1/namespaces/namespace-1547250278-17535/pods/valid-pod", "uid":"dbd5ae0f-15fa-11e9-befc-0242ac110002", "resourceVersion":"770", "creationTimestamp":"2019-01-11T23:44:38Z", "labels":map[string]interface {}{"name":"valid-pod"}}, "spec":map[string]interface {}{"securityContext":map[string]interface {}{}, "schedulerName":"default-scheduler", "priority":0, "containers":[]interface {}{map[string]interface {}{"name":"kubernetes-serve-hostname", "image":"k8s.gcr.io/serve_hostname", "resources":map[string]interface {}{"limits":map[string]interface {}{"cpu":"1", "memory":"512Mi"}, "requests":map[string]interface {}{"cpu":"1", "memory":"512Mi"}}, "terminationMessagePath":"/dev/termination-log", "terminationMessagePolicy":"File", "imagePullPolicy":"Always"}}, "restartPolicy":"Always", "terminationGracePeriodSeconds":30, "dnsPolicy":"ClusterFirst"}, "status":map[string]interface {}{"qosClass":"Guaranteed", "phase":"Pending"}, "kind":"Pod"}
I0111 23:44:39.127] has:missing is not found
I0111 23:44:39.216] Successful
I0111 23:44:39.217] message:Error executing template: template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing". Printing more information for debugging the template:
I0111 23:44:39.217] 	template was:
I0111 23:44:39.217] 		{{.missing}}
I0111 23:44:39.217] 	raw data was:
I0111 23:44:39.217] 		{"apiVersion":"v1","kind":"Pod","metadata":{"creationTimestamp":"2019-01-11T23:44:38Z","labels":{"name":"valid-pod"},"name":"valid-pod","namespace":"namespace-1547250278-17535","resourceVersion":"770","selfLink":"/api/v1/namespaces/namespace-1547250278-17535/pods/valid-pod","uid":"dbd5ae0f-15fa-11e9-befc-0242ac110002"},"spec":{"containers":[{"image":"k8s.gcr.io/serve_hostname","imagePullPolicy":"Always","name":"kubernetes-serve-hostname","resources":{"limits":{"cpu":"1","memory":"512Mi"},"requests":{"cpu":"1","memory":"512Mi"}},"terminationMessagePath":"/dev/termination-log","terminationMessagePolicy":"File"}],"dnsPolicy":"ClusterFirst","priority":0,"restartPolicy":"Always","schedulerName":"default-scheduler","securityContext":{},"terminationGracePeriodSeconds":30},"status":{"phase":"Pending","qosClass":"Guaranteed"}}
I0111 23:44:39.217] 	object given to template engine was:
I0111 23:44:39.218] 		map[apiVersion:v1 kind:Pod metadata:map[creationTimestamp:2019-01-11T23:44:38Z labels:map[name:valid-pod] name:valid-pod namespace:namespace-1547250278-17535 resourceVersion:770 selfLink:/api/v1/namespaces/namespace-1547250278-17535/pods/valid-pod uid:dbd5ae0f-15fa-11e9-befc-0242ac110002] spec:map[securityContext:map[] terminationGracePeriodSeconds:30 containers:[map[image:k8s.gcr.io/serve_hostname imagePullPolicy:Always name:kubernetes-serve-hostname resources:map[limits:map[cpu:1 memory:512Mi] requests:map[cpu:1 memory:512Mi]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:File]] dnsPolicy:ClusterFirst priority:0 restartPolicy:Always schedulerName:default-scheduler] status:map[phase:Pending qosClass:Guaranteed]]
I0111 23:44:39.218] has:map has no entry for key "missing"
W0111 23:44:39.318] error: error executing jsonpath "{.missing}": missing is not found
W0111 23:44:39.319] error: error executing template "{{.missing}}": template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing"
W0111 23:44:40.303] E0111 23:44:40.302158   85454 streamwatcher.go:109] Unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)
I0111 23:44:40.403] Successful
I0111 23:44:40.403] message:NAME        READY     STATUS    RESTARTS   AGE
I0111 23:44:40.404] valid-pod   0/1       Pending   0          1s
I0111 23:44:40.404] has:STATUS
I0111 23:44:40.404] Successful
... skipping 78 lines ...
I0111 23:44:42.600]   terminationGracePeriodSeconds: 30
I0111 23:44:42.600] status:
I0111 23:44:42.600]   phase: Pending
I0111 23:44:42.600]   qosClass: Guaranteed
I0111 23:44:42.601] has:name: valid-pod
I0111 23:44:42.601] Successful
I0111 23:44:42.601] message:Error from server (NotFound): pods "invalid-pod" not found
I0111 23:44:42.601] has:"invalid-pod" not found
I0111 23:44:42.682] pod "valid-pod" deleted
I0111 23:44:42.786] test-cmd-util.sh:1659: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:44:42.935] pod/redis-master created
I0111 23:44:42.939] pod/valid-pod created
I0111 23:44:43.036] Successful
... skipping 237 lines ...
I0111 23:44:44.169] namespace-1547250273-13325   11s
I0111 23:44:44.169] namespace-1547250278-17535   6s
I0111 23:44:44.169] namespace-1547250283-4358    1s
I0111 23:44:44.169] has:application/json
W0111 23:44:44.300] I0111 23:44:44.300321   68899 controller.go:597] quota admission added evaluator for: {extensions daemonsets}
W0111 23:44:44.315] I0111 23:44:44.314607   68899 controller.go:597] quota admission added evaluator for: {apps controllerrevisions}
W0111 23:44:44.318] I0111 23:44:44.318165   73116 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547250283-4358", Name:"bind", UID:"df3f8a1a-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"786", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0111 23:44:44.319] I0111 23:44:44.318207   73116 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547250283-4358", Name:"bind", UID:"df3f8a1a-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"786", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0111 23:44:44.319] I0111 23:44:44.318216   73116 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547250283-4358", Name:"bind", UID:"df3f8a1a-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"786", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0111 23:44:44.322] I0111 23:44:44.321774   73116 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547250283-4358", Name:"bind", UID:"df3f8a1a-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"789", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0111 23:44:44.322] I0111 23:44:44.321813   73116 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547250283-4358", Name:"bind", UID:"df3f8a1a-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"789", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
W0111 23:44:44.323] I0111 23:44:44.321823   73116 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547250283-4358", Name:"bind", UID:"df3f8a1a-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"789", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
I0111 23:44:44.423] daemonset.extensions/bind created
I0111 23:44:44.423] test-cmd-util.sh:1404: Successful get ds {{range.items}}{{.metadata.name}}:{{end}}: bind:
I0111 23:44:44.606] Successful
I0111 23:44:44.606] message:NAME DESIRED CURRENT READY UP-TO-DATE AVAILABLE NODE SELECTOR
I0111 23:44:44.606] bind 1 0 0 0 0 <none>
I0111 23:44:44.606] has:NAME DESIRED CURRENT READY UP-TO-DATE AVAILABLE NODE SELECTOR
... skipping 57 lines ...
I0111 23:44:52.633] 
I0111 23:44:52.635] +++ Running case: test-cmd.run_create_secret_tests 
I0111 23:44:52.637] +++ working dir: /go/src/k8s.io/kubernetes
I0111 23:44:52.640] +++ command: run_create_secret_tests
I0111 23:44:52.741] Successful
I0111 23:44:52.741] message:No resources found.
I0111 23:44:52.741] Error from server (NotFound): secrets "mysecret" not found
I0111 23:44:52.742] has:secrets "mysecret" not found
I0111 23:44:52.918] Successful
I0111 23:44:52.919] message:No resources found.
I0111 23:44:52.919] Error from server (NotFound): secrets "mysecret" not found
I0111 23:44:52.919] has:secrets "mysecret" not found
I0111 23:44:52.921] Successful
I0111 23:44:52.921] message:user-specified
I0111 23:44:52.921] has:user-specified
I0111 23:44:53.003] Successful
I0111 23:44:53.086] {"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"tester-create-cm","namespace":"default","selfLink":"/api/v1/namespaces/default/configmaps/tester-create-cm","uid":"e47b7c25-15fa-11e9-befc-0242ac110002","resourceVersion":"859","creationTimestamp":"2019-01-11T23:44:53Z"}}
... skipping 101 lines ...
I0111 23:44:55.812] test-cmd-util.sh:1953: Successful get customresourcedefinitions {{range.items}}{{.metadata.name}}:{{end}}: foos.company.com:
I0111 23:44:55.961] customresourcedefinition.apiextensions.k8s.io/bars.company.com created
I0111 23:44:56.067] test-cmd-util.sh:1975: Successful get customresourcedefinitions {{range.items}}{{.metadata.name}}:{{end}}: bars.company.com:foos.company.com:
I0111 23:44:56.221] customresourcedefinition.apiextensions.k8s.io/resources.mygroup.example.com created
I0111 23:44:56.325] test-cmd-util.sh:2002: Successful get customresourcedefinitions {{range.items}}{{.metadata.name}}:{{end}}: bars.company.com:foos.company.com:resources.mygroup.example.com:
I0111 23:44:56.479] customresourcedefinition.apiextensions.k8s.io/validfoos.company.com created
W0111 23:44:56.579] E0111 23:44:56.224801   68899 autoregister_controller.go:190] v1alpha1.mygroup.example.com failed with : apiservices.apiregistration.k8s.io "v1alpha1.mygroup.example.com" already exists
I0111 23:44:56.680] test-cmd-util.sh:2037: Successful get customresourcedefinitions {{range.items}}{{.metadata.name}}:{{end}}: bars.company.com:foos.company.com:resources.mygroup.example.com:validfoos.company.com:
I0111 23:44:56.680] +++ [0111 23:44:56] Creating namespace namespace-1547250296-5212
I0111 23:44:56.680] namespace/namespace-1547250296-5212 created
I0111 23:44:56.747] Context "test" modified.
I0111 23:44:56.753] +++ [0111 23:44:56] Testing kubectl non-native resources
I0111 23:44:56.831] {"kind":"APIResourceList","apiVersion":"v1","groupVersion":"company.com/v1","resources":[{"name":"validfoos","singularName":"validfoo","namespaced":true,"kind":"ValidFoo","verbs":["delete","deletecollection","get","list","patch","create","update","watch"]},{"name":"foos","singularName":"foo","namespaced":true,"kind":"Foo","verbs":["delete","deletecollection","get","list","patch","create","update","watch"]},{"name":"bars","singularName":"bar","namespaced":true,"kind":"Bar","verbs":["delete","deletecollection","get","list","patch","create","update","watch"]}]}
... skipping 132 lines ...
I0111 23:45:00.002] foo.company.com/test patched
I0111 23:45:00.102] test-cmd-util.sh:2143: Successful get foos/test {{.patched}}: value1
I0111 23:45:00.193] foo.company.com/test patched
I0111 23:45:00.293] test-cmd-util.sh:2145: Successful get foos/test {{.patched}}: value2
I0111 23:45:00.385] foo.company.com/test patched
I0111 23:45:00.487] test-cmd-util.sh:2147: Successful get foos/test {{.patched}}: <no value>
I0111 23:45:00.665] +++ [0111 23:45:00] "kubectl patch --local" returns error as expected for CustomResource: error: cannot apply strategic merge patch for company.com/v1, Kind=Foo locally, try --type merge
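
[editor's note] Strategic merge patch needs Go struct metadata (patchStrategy/patchMergeKey tags) that custom resources don't have, which is why kubectl recommends `--type merge` (RFC 7386 JSON merge patch) here. A hedged sketch of merge-patch semantics over generic maps, not kubectl's actual implementation — note how a null value deletes the key, matching the `{{.patched}}: <no value>` assertion above:

package main

import (
	"encoding/json"
	"fmt"
)

// mergePatch applies RFC 7386 semantics: null deletes a key, nested
// objects merge recursively, everything else replaces wholesale.
func mergePatch(doc, patch map[string]interface{}) map[string]interface{} {
	for k, v := range patch {
		if v == nil {
			delete(doc, k)
			continue
		}
		if pm, ok := v.(map[string]interface{}); ok {
			if dm, ok := doc[k].(map[string]interface{}); ok {
				doc[k] = mergePatch(dm, pm)
				continue
			}
		}
		doc[k] = v
	}
	return doc
}

func main() {
	doc := map[string]interface{}{"patched": "value2", "someField": "field1"}
	var patch map[string]interface{}
	json.Unmarshal([]byte(`{"patched":null}`), &patch)
	out, _ := json.Marshal(mergePatch(doc, patch))
	fmt.Println(string(out)) // {"someField":"field1"}
}
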
I0111 23:45:00.742] {
I0111 23:45:00.743]     "apiVersion": "company.com/v1",
I0111 23:45:00.743]     "kind": "Foo",
I0111 23:45:00.743]     "metadata": {
I0111 23:45:00.743]         "annotations": {
I0111 23:45:00.743]             "kubernetes.io/change-cause": "kubectl patch foos/test --server=http://127.0.0.1:8080 --match-server-version=true --patch={\"patched\":null} --type=merge --record=true"
... skipping 112 lines ...
I0111 23:45:02.248] has:bar.company.com/test
I0111 23:45:02.338] bar.company.com "test" deleted
W0111 23:45:02.439] /go/src/k8s.io/kubernetes/hack/lib/test.sh: line 264: 88003 Killed                  while [ ${tries} -lt 10 ]; do
W0111 23:45:02.439]     tries=$((tries+1)); kubectl "${kube_flags[@]}" patch bars/test -p "{\"patched\":\"${tries}\"}" --type=merge; sleep 1;
W0111 23:45:02.440] done
W0111 23:45:02.440] /go/src/k8s.io/kubernetes/hack/make-rules/test-cmd-util.sh: line 2201: 88002 Killed                  kubectl "${kube_flags[@]}" get bars --request-timeout=1m --watch-only -o name
W0111 23:45:03.917] E0111 23:45:03.916550   73116 resource_quota_controller.go:460] failed to sync resource monitors: [couldn't start monitor for resource {"company.com" "v1" "foos"}: unable to monitor quota for resource "company.com/v1, Resource=foos", couldn't start monitor for resource {"company.com" "v1" "bars"}: unable to monitor quota for resource "company.com/v1, Resource=bars", couldn't start monitor for resource {"company.com" "v1" "validfoos"}: unable to monitor quota for resource "company.com/v1, Resource=validfoos", couldn't start monitor for resource {"mygroup.example.com" "v1alpha1" "resources"}: unable to monitor quota for resource "mygroup.example.com/v1alpha1, Resource=resources"]
W0111 23:45:04.061] I0111 23:45:04.060965   73116 controller_utils.go:1025] Waiting for caches to sync for garbage collector controller
W0111 23:45:04.162] I0111 23:45:04.161414   73116 controller_utils.go:1032] Caches are synced for garbage collector controller
I0111 23:45:04.267] test-cmd-util.sh:2227: Successful get bars {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:45:04.423] foo.company.com/test created
I0111 23:45:04.527] test-cmd-util.sh:2233: Successful get foos {{range.items}}{{.metadata.name}}:{{end}}: test:
I0111 23:45:04.625] test-cmd-util.sh:2236: Successful get foos/test {{.someField}}: field1
... skipping 58 lines ...
I0111 23:45:10.580] bar.company.com/test created
I0111 23:45:10.685] test-cmd-util.sh:2362: Successful get bars {{len .items}}: 1
I0111 23:45:10.771] namespace "non-native-resources" deleted
I0111 23:45:15.989] test-cmd-util.sh:2365: Successful get bars {{len .items}}: 0
I0111 23:45:16.170] customresourcedefinition.apiextensions.k8s.io "foos.company.com" deleted
W0111 23:45:16.271] No resources found.
W0111 23:45:16.271] Error from server (NotFound): namespaces "non-native-resources" not found
I0111 23:45:16.371] customresourcedefinition.apiextensions.k8s.io "bars.company.com" deleted
I0111 23:45:16.379] customresourcedefinition.apiextensions.k8s.io "resources.mygroup.example.com" deleted
I0111 23:45:16.482] customresourcedefinition.apiextensions.k8s.io "validfoos.company.com" deleted
I0111 23:45:16.512] +++ exit code: 0
I0111 23:45:16.595] Recording: run_cmd_with_img_tests
I0111 23:45:16.596] Running command: run_cmd_with_img_tests
... skipping 9 lines ...
W0111 23:45:16.903] I0111 23:45:16.896396   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250316-20346", Name:"test1-7f54676899", UID:"f2ac0e80-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"966", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test1-7f54676899-8pmgd
I0111 23:45:17.003] Successful
I0111 23:45:17.003] message:deployment.apps/test1 created
I0111 23:45:17.003] has:deployment.apps/test1 created
I0111 23:45:17.004] deployment.extensions "test1" deleted
I0111 23:45:17.067] Successful
I0111 23:45:17.067] message:error: Invalid image name "InvalidImageName": invalid reference format
I0111 23:45:17.067] has:error: Invalid image name "InvalidImageName": invalid reference format
I0111 23:45:17.082] +++ exit code: 0
I0111 23:45:17.137] Recording: run_recursive_resources_tests
I0111 23:45:17.137] Running command: run_recursive_resources_tests
I0111 23:45:17.158] 
I0111 23:45:17.159] +++ Running case: test-cmd.run_recursive_resources_tests 
I0111 23:45:17.162] +++ working dir: /go/src/k8s.io/kubernetes
... skipping 4 lines ...
I0111 23:45:17.328] Context "test" modified.
I0111 23:45:17.428] test-cmd-util.sh:2385: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:45:17.686] test-cmd-util.sh:2389: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:17.688] Successful
I0111 23:45:17.689] message:pod/busybox0 created
I0111 23:45:17.689] pod/busybox1 created
I0111 23:45:17.689] error: error validating "hack/testdata/recursive/pod/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I0111 23:45:17.689] has:error validating data: kind not set
I0111 23:45:17.786] test-cmd-util.sh:2394: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:17.980] test-cmd-util.sh:2402: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: busybox:busybox:
I0111 23:45:17.982] Successful
I0111 23:45:17.983] message:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0111 23:45:17.983] has:Object 'Kind' is missing
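
[editor's note] Every "Object 'Kind' is missing" failure in this run traces back to the same fixture bug: the broken manifests spell the field "ind" instead of "kind", so the decoded type metadata comes back empty. A minimal reproduction of that check, assuming plain encoding/json rather than the apimachinery decoder:

package main

import (
	"encoding/json"
	"fmt"
)

// typeMeta mirrors the apiVersion/kind envelope every Kubernetes object
// carries; the decoder refuses objects whose Kind cannot be resolved.
type typeMeta struct {
	APIVersion string `json:"apiVersion"`
	Kind       string `json:"kind"`
}

func main() {
	// Note "ind" where "kind" should be, as in busybox-broken.yaml above.
	broken := []byte(`{"apiVersion":"v1","ind":"Pod","metadata":{"name":"busybox2"}}`)
	var tm typeMeta
	if err := json.Unmarshal(broken, &tm); err != nil {
		panic(err)
	}
	if tm.Kind == "" {
		fmt.Printf("unable to decode: Object 'Kind' is missing in '%s'\n", broken)
	}
}
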
I0111 23:45:18.100] test-cmd-util.sh:2409: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:18.379] test-cmd-util.sh:2413: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
I0111 23:45:18.381] Successful
I0111 23:45:18.382] message:pod/busybox0 replaced
I0111 23:45:18.382] pod/busybox1 replaced
I0111 23:45:18.382] error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I0111 23:45:18.382] has:error validating data: kind not set
I0111 23:45:18.484] test-cmd-util.sh:2418: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:18.592] Successful
I0111 23:45:18.592] message:Name:               busybox0
I0111 23:45:18.592] Namespace:          namespace-1547250317-15953
I0111 23:45:18.592] Priority:           0
I0111 23:45:18.592] PriorityClassName:  <none>
... skipping 159 lines ...
I0111 23:45:18.605] has:Object 'Kind' is missing
I0111 23:45:18.701] test-cmd-util.sh:2428: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:18.897] test-cmd-util.sh:2432: Successful get pods {{range.items}}{{.metadata.annotations.annotatekey}}:{{end}}: annotatevalue:annotatevalue:
I0111 23:45:18.899] Successful
I0111 23:45:18.900] message:pod/busybox0 annotated
I0111 23:45:18.900] pod/busybox1 annotated
I0111 23:45:18.900] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0111 23:45:18.900] has:Object 'Kind' is missing
I0111 23:45:18.997] test-cmd-util.sh:2437: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:19.267] test-cmd-util.sh:2441: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
I0111 23:45:19.269] Successful
I0111 23:45:19.269] message:Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
I0111 23:45:19.270] pod/busybox0 configured
I0111 23:45:19.270] Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
I0111 23:45:19.270] pod/busybox1 configured
I0111 23:45:19.270] error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I0111 23:45:19.270] has:error validating data: kind not set
I0111 23:45:19.367] test-cmd-util.sh:2447: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:45:19.512] deployment.extensions/nginx created
W0111 23:45:19.613] I0111 23:45:19.515442   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250317-15953", Name:"nginx", UID:"f43c0cd4-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"990", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-794c6b99b4 to 3
W0111 23:45:19.613] I0111 23:45:19.517381   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250317-15953", Name:"nginx-794c6b99b4", UID:"f43c9b7a-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"991", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-794c6b99b4-6j9kl
W0111 23:45:19.614] I0111 23:45:19.519903   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250317-15953", Name:"nginx-794c6b99b4", UID:"f43c9b7a-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"991", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-794c6b99b4-lbk5m
W0111 23:45:19.614] I0111 23:45:19.520002   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250317-15953", Name:"nginx-794c6b99b4", UID:"f43c9b7a-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"991", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-794c6b99b4-xs8q9
... skipping 42 lines ...
I0111 23:45:19.904] status: {}
I0111 23:45:19.904] has:apps/v1beta1
I0111 23:45:19.984] deployment.extensions "nginx" deleted
I0111 23:45:20.106] test-cmd-util.sh:2463: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:20.285] test-cmd-util.sh:2467: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:20.287] Successful
I0111 23:45:20.287] message:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0111 23:45:20.287] has:Object 'Kind' is missing
I0111 23:45:20.385] test-cmd-util.sh:2472: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:20.477] Successful
I0111 23:45:20.478] message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0111 23:45:20.478] has:busybox0:busybox1:
I0111 23:45:20.480] Successful
I0111 23:45:20.480] message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0111 23:45:20.480] has:Object 'Kind' is missing
I0111 23:45:20.581] test-cmd-util.sh:2481: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:20.677] pod/busybox0 labeled pod/busybox1 labeled error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0111 23:45:20.780] test-cmd-util.sh:2486: Successful get pods {{range.items}}{{.metadata.labels.mylabel}}:{{end}}: myvalue:myvalue:
I0111 23:45:20.782] Successful
I0111 23:45:20.782] message:pod/busybox0 labeled
I0111 23:45:20.783] pod/busybox1 labeled
I0111 23:45:20.783] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0111 23:45:20.783] has:Object 'Kind' is missing
I0111 23:45:20.885] test-cmd-util.sh:2491: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:20.977] pod/busybox0 patched pod/busybox1 patched error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0111 23:45:21.079] test-cmd-util.sh:2496: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: prom/busybox:prom/busybox:
I0111 23:45:21.081] Successful
I0111 23:45:21.081] message:pod/busybox0 patched
I0111 23:45:21.081] pod/busybox1 patched
I0111 23:45:21.081] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0111 23:45:21.082] has:Object 'Kind' is missing
I0111 23:45:21.181] test-cmd-util.sh:2501: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:21.374] test-cmd-util.sh:2505: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:45:21.376] Successful
I0111 23:45:21.376] message:warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
I0111 23:45:21.376] pod "busybox0" force deleted
I0111 23:45:21.376] pod "busybox1" force deleted
I0111 23:45:21.377] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I0111 23:45:21.377] has:Object 'Kind' is missing
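
[editor's note] What run_recursive_resources_tests keeps demonstrating is that recursive (-R) processing is best-effort: the valid manifests in a directory tree are created, labeled, patched, and deleted, while the one broken file is reported at the end instead of aborting the batch. A sketch of that aggregation pattern under the same assumptions as the previous snippet (in-memory JSON documents standing in for the YAML fixtures):

package main

import (
	"encoding/json"
	"fmt"
	"os"
)

func main() {
	// Stand-ins for the files kubectl -R discovers under
	// hack/testdata/recursive/pod/ (JSON here; the real fixtures are YAML).
	manifests := map[string]string{
		"pod/busybox0.yaml":           `{"apiVersion":"v1","kind":"Pod","metadata":{"name":"busybox0"}}`,
		"pod/busybox1.yaml":           `{"apiVersion":"v1","kind":"Pod","metadata":{"name":"busybox1"}}`,
		"pod/pod/busybox-broken.yaml": `{"apiVersion":"v1","ind":"Pod","metadata":{"name":"busybox2"}}`,
	}
	var errs []string
	for path, doc := range manifests {
		var obj struct {
			Kind string `json:"kind"`
		}
		if json.Unmarshal([]byte(doc), &obj) != nil || obj.Kind == "" {
			// Record and continue: -R processing is best-effort.
			errs = append(errs, fmt.Sprintf("unable to decode %q: Object 'Kind' is missing", path))
			continue
		}
		fmt.Println("pod created:", path)
	}
	// Failures surface together after the successes, as in the log above.
	for _, e := range errs {
		fmt.Fprintln(os.Stderr, "error:", e)
	}
}
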
I0111 23:45:21.475] test-cmd-util.sh:2510: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:45:21.634] replicationcontroller/busybox0 created
I0111 23:45:21.637] replicationcontroller/busybox1 created
W0111 23:45:21.738] I0111 23:45:20.886545   73116 namespace_controller.go:171] Namespace has been deleted non-native-resources
W0111 23:45:21.738] I0111 23:45:21.637019   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250317-15953", Name:"busybox0", UID:"f57fe219-15fa-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1021", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox0-v649c
W0111 23:45:21.739] error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
W0111 23:45:21.739] I0111 23:45:21.640504   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250317-15953", Name:"busybox1", UID:"f5808953-15fa-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1023", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox1-49s8k
I0111 23:45:21.839] test-cmd-util.sh:2514: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:21.846] test-cmd-util.sh:2519: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:21.940] test-cmd-util.sh:2520: Successful get rc busybox0 {{.spec.replicas}}: 1
I0111 23:45:22.037] test-cmd-util.sh:2521: Successful get rc busybox1 {{.spec.replicas}}: 1
I0111 23:45:22.229] test-cmd-util.sh:2526: Successful get hpa busybox0 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 80
I0111 23:45:22.325] test-cmd-util.sh:2527: Successful get hpa busybox1 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 80
I0111 23:45:22.327] Successful
I0111 23:45:22.327] message:horizontalpodautoscaler.autoscaling/busybox0 autoscaled
I0111 23:45:22.327] horizontalpodautoscaler.autoscaling/busybox1 autoscaled
I0111 23:45:22.328] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0111 23:45:22.328] has:Object 'Kind' is missing
I0111 23:45:22.415] horizontalpodautoscaler.autoscaling "busybox0" deleted
I0111 23:45:22.508] horizontalpodautoscaler.autoscaling "busybox1" deleted
I0111 23:45:22.612] test-cmd-util.sh:2535: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:22.710] test-cmd-util.sh:2536: Successful get rc busybox0 {{.spec.replicas}}: 1
I0111 23:45:22.809] test-cmd-util.sh:2537: Successful get rc busybox1 {{.spec.replicas}}: 1
I0111 23:45:23.013] test-cmd-util.sh:2541: Successful get service busybox0 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80
I0111 23:45:23.110] test-cmd-util.sh:2542: Successful get service busybox1 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80
I0111 23:45:23.112] Successful
I0111 23:45:23.112] message:service/busybox0 exposed
I0111 23:45:23.112] service/busybox1 exposed
I0111 23:45:23.112] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0111 23:45:23.112] has:Object 'Kind' is missing
I0111 23:45:23.212] test-cmd-util.sh:2548: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:23.313] test-cmd-util.sh:2549: Successful get rc busybox0 {{.spec.replicas}}: 1
I0111 23:45:23.410] test-cmd-util.sh:2550: Successful get rc busybox1 {{.spec.replicas}}: 1
I0111 23:45:23.614] test-cmd-util.sh:2554: Successful get rc busybox0 {{.spec.replicas}}: 2
I0111 23:45:23.714] test-cmd-util.sh:2555: Successful get rc busybox1 {{.spec.replicas}}: 2
I0111 23:45:23.716] Successful
I0111 23:45:23.716] message:replicationcontroller/busybox0 scaled
I0111 23:45:23.716] replicationcontroller/busybox1 scaled
I0111 23:45:23.717] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0111 23:45:23.717] has:Object 'Kind' is missing
I0111 23:45:23.815] test-cmd-util.sh:2560: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:24.013] test-cmd-util.sh:2564: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:45:24.016] Successful
I0111 23:45:24.016] message:warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
I0111 23:45:24.016] replicationcontroller "busybox0" force deleted
I0111 23:45:24.016] replicationcontroller "busybox1" force deleted
I0111 23:45:24.017] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0111 23:45:24.017] has:Object 'Kind' is missing
I0111 23:45:24.114] test-cmd-util.sh:2569: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:45:24.273] deployment.extensions/nginx1-deployment created
I0111 23:45:24.276] deployment.extensions/nginx0-deployment created
W0111 23:45:24.378] I0111 23:45:23.507415   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250317-15953", Name:"busybox0", UID:"f57fe219-15fa-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1043", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox0-rbgkh
W0111 23:45:24.378] I0111 23:45:23.514520   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250317-15953", Name:"busybox1", UID:"f5808953-15fa-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1048", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox1-5gwkd
W0111 23:45:24.378] I0111 23:45:24.276403   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250317-15953", Name:"nginx1-deployment", UID:"f7128c38-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1063", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx1-deployment-5dc485c78 to 2
W0111 23:45:24.379] error: error validating "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
W0111 23:45:24.379] I0111 23:45:24.279391   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250317-15953", Name:"nginx0-deployment", UID:"f7133051-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1065", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx0-deployment-76db6cfd79 to 2
W0111 23:45:24.379] I0111 23:45:24.279723   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250317-15953", Name:"nginx1-deployment-5dc485c78", UID:"f7131245-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1064", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx1-deployment-5dc485c78-vkt4b
W0111 23:45:24.379] I0111 23:45:24.282180   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250317-15953", Name:"nginx1-deployment-5dc485c78", UID:"f7131245-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1064", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx1-deployment-5dc485c78-28t2j
W0111 23:45:24.380] I0111 23:45:24.282225   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250317-15953", Name:"nginx0-deployment-76db6cfd79", UID:"f713a78a-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1068", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx0-deployment-76db6cfd79-xxvnw
W0111 23:45:24.380] I0111 23:45:24.285115   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250317-15953", Name:"nginx0-deployment-76db6cfd79", UID:"f713a78a-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1068", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx0-deployment-76db6cfd79-trkck
I0111 23:45:24.481] test-cmd-util.sh:2573: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx0-deployment:nginx1-deployment:
I0111 23:45:24.487] test-cmd-util.sh:2574: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:k8s.gcr.io/nginx:1.7.9:
I0111 23:45:24.826] test-cmd-util.sh:2578: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:k8s.gcr.io/nginx:1.7.9:
I0111 23:45:24.828] Successful
I0111 23:45:24.829] message:deployment.extensions/nginx1-deployment
I0111 23:45:24.829] deployment.extensions/nginx0-deployment
I0111 23:45:24.829] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0111 23:45:24.829] has:Object 'Kind' is missing
I0111 23:45:24.926] deployment.extensions/nginx1-deployment paused
I0111 23:45:24.929] deployment.extensions/nginx0-deployment paused
W0111 23:45:25.029] I0111 23:45:24.589832   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250317-15953", Name:"nginx1-deployment", UID:"f7128c38-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1087", FieldPath:""}): type: 'Warning' reason: 'DeploymentRollbackTemplateUnchanged' The rollback revision contains the same template as current deployment "nginx1-deployment"
W0111 23:45:25.030] I0111 23:45:24.622688   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250317-15953", Name:"nginx0-deployment", UID:"f7133051-15fa-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1091", FieldPath:""}): type: 'Warning' reason: 'DeploymentRollbackTemplateUnchanged' The rollback revision contains the same template as current deployment "nginx0-deployment"
I0111 23:45:25.130] test-cmd-util.sh:2585: Successful get deployment {{range.items}}{{.spec.paused}}:{{end}}: true:true:
I0111 23:45:25.130] Successful
I0111 23:45:25.131] message:error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0111 23:45:25.131] has:Object 'Kind' is missing
I0111 23:45:25.134] deployment.extensions/nginx1-deployment resumed
I0111 23:45:25.137] deployment.extensions/nginx0-deployment resumed
I0111 23:45:25.245] test-cmd-util.sh:2591: Successful get deployment {{range.items}}{{.spec.paused}}:{{end}}: <no value>:<no value>:
I0111 23:45:25.247] Successful
I0111 23:45:25.247] message:error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0111 23:45:25.247] has:Object 'Kind' is missing
I0111 23:45:25.362] Successful
I0111 23:45:25.362] message:deployments "nginx1-deployment"
I0111 23:45:25.362] REVISION  CHANGE-CAUSE
I0111 23:45:25.362] 1         <none>
I0111 23:45:25.362] 
I0111 23:45:25.362] deployments "nginx0-deployment"
I0111 23:45:25.363] REVISION  CHANGE-CAUSE
I0111 23:45:25.363] 1         <none>
I0111 23:45:25.363] 
I0111 23:45:25.363] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0111 23:45:25.363] has:nginx0-deployment
I0111 23:45:25.363] Successful
I0111 23:45:25.364] message:deployments "nginx1-deployment"
I0111 23:45:25.364] REVISION  CHANGE-CAUSE
I0111 23:45:25.364] 1         <none>
I0111 23:45:25.364] 
I0111 23:45:25.364] deployments "nginx0-deployment"
I0111 23:45:25.364] REVISION  CHANGE-CAUSE
I0111 23:45:25.364] 1         <none>
I0111 23:45:25.364] 
I0111 23:45:25.365] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0111 23:45:25.365] has:nginx1-deployment
I0111 23:45:25.366] Successful
I0111 23:45:25.366] message:deployments "nginx1-deployment"
I0111 23:45:25.366] REVISION  CHANGE-CAUSE
I0111 23:45:25.366] 1         <none>
I0111 23:45:25.366] 
I0111 23:45:25.367] deployments "nginx0-deployment"
I0111 23:45:25.367] REVISION  CHANGE-CAUSE
I0111 23:45:25.367] 1         <none>
I0111 23:45:25.367] 
I0111 23:45:25.367] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0111 23:45:25.367] has:Object 'Kind' is missing
I0111 23:45:25.450] deployment.extensions "nginx1-deployment" force deleted
I0111 23:45:25.454] deployment.extensions "nginx0-deployment" force deleted
W0111 23:45:25.555] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W0111 23:45:25.556] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I0111 23:45:26.558] test-cmd-util.sh:2607: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:45:26.704] replicationcontroller/busybox0 created
I0111 23:45:26.708] replicationcontroller/busybox1 created
I0111 23:45:26.814] test-cmd-util.sh:2611: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I0111 23:45:26.912] Successful
I0111 23:45:26.912] message:no rollbacker has been implemented for {"" "ReplicationController"}
... skipping 4 lines ...
I0111 23:45:26.914] message:no rollbacker has been implemented for {"" "ReplicationController"}
I0111 23:45:26.914] no rollbacker has been implemented for {"" "ReplicationController"}
I0111 23:45:26.914] unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0111 23:45:26.915] has:Object 'Kind' is missing
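
[editor's note] The "no rollbacker has been implemented" message reflects that rollout verbs are wired up per resource kind; ReplicationControllers simply have no registered implementation, unlike Deployments. A purely hypothetical sketch of such a per-kind registry (the names below are illustrative, not kubectl's actual types):

package main

import "fmt"

// Rollbacker is a hypothetical per-kind hook; kubectl keeps a comparable
// mapping internally, but this interface and registry are illustrative only.
type Rollbacker interface {
	Rollback(name string) error
}

type deploymentRollbacker struct{}

func (deploymentRollbacker) Rollback(name string) error { return nil }

var rollbackers = map[string]Rollbacker{
	"Deployment": deploymentRollbacker{},
	// No entry for "ReplicationController".
}

func main() {
	for _, kind := range []string{"Deployment", "ReplicationController"} {
		if _, ok := rollbackers[kind]; !ok {
			// Matches the log's error format for unsupported kinds.
			fmt.Printf("no rollbacker has been implemented for {%q %q}\n", "", kind)
			continue
		}
		fmt.Println("rolled back", kind)
	}
}
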
I0111 23:45:27.014] Successful
I0111 23:45:27.014] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0111 23:45:27.014] error: replicationcontrollers "busybox0" pausing is not supported
I0111 23:45:27.014] error: replicationcontrollers "busybox1" pausing is not supported
I0111 23:45:27.015] has:Object 'Kind' is missing
I0111 23:45:27.016] Successful
I0111 23:45:27.016] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0111 23:45:27.016] error: replicationcontrollers "busybox0" pausing is not supported
I0111 23:45:27.017] error: replicationcontrollers "busybox1" pausing is not supported
I0111 23:45:27.017] has:replicationcontrollers "busybox0" pausing is not supported
I0111 23:45:27.017] Successful
I0111 23:45:27.018] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0111 23:45:27.018] error: replicationcontrollers "busybox0" pausing is not supported
I0111 23:45:27.018] error: replicationcontrollers "busybox1" pausing is not supported
I0111 23:45:27.018] has:replicationcontrollers "busybox1" pausing is not supported
I0111 23:45:27.117] Successful
I0111 23:45:27.117] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0111 23:45:27.117] error: replicationcontrollers "busybox0" resuming is not supported
I0111 23:45:27.118] error: replicationcontrollers "busybox1" resuming is not supported
I0111 23:45:27.118] has:Object 'Kind' is missing
I0111 23:45:27.119] Successful
I0111 23:45:27.119] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0111 23:45:27.119] error: replicationcontrollers "busybox0" resuming is not supported
I0111 23:45:27.119] error: replicationcontrollers "busybox1" resuming is not supported
I0111 23:45:27.119] has:replicationcontrollers "busybox0" resuming is not supported
I0111 23:45:27.121] Successful
I0111 23:45:27.121] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0111 23:45:27.122] error: replicationcontrollers "busybox0" resuming is not supported
I0111 23:45:27.122] error: replicationcontrollers "busybox1" resuming is not supported
I0111 23:45:27.122] has:replicationcontrollers "busybox0" resuming is not supported
I0111 23:45:27.204] replicationcontroller "busybox0" force deleted
I0111 23:45:27.209] replicationcontroller "busybox1" force deleted
W0111 23:45:27.309] I0111 23:45:26.706830   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250317-15953", Name:"busybox0", UID:"f88583f6-15fa-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1120", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox0-gkmmq
W0111 23:45:27.310] error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
W0111 23:45:27.310] I0111 23:45:26.711431   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250317-15953", Name:"busybox1", UID:"f88639ec-15fa-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1122", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox1-kr6f2
W0111 23:45:27.310] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W0111 23:45:27.311] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I0111 23:45:28.229] +++ exit code: 0
I0111 23:45:28.320] Recording: run_namespace_tests
I0111 23:45:28.320] Running command: run_namespace_tests
I0111 23:45:28.341] 
I0111 23:45:28.343] +++ Running case: test-cmd.run_namespace_tests 
I0111 23:45:28.345] +++ working dir: /go/src/k8s.io/kubernetes
... skipping 2 lines ...
I0111 23:45:28.438] namespace/my-namespace created
I0111 23:45:28.543] test-cmd-util.sh:2650: Successful get namespaces/my-namespace {{.metadata.name}}: my-namespace
I0111 23:45:28.630] namespace "my-namespace" deleted
I0111 23:45:33.730] namespace/my-namespace condition met
I0111 23:45:33.822] Successful
I0111 23:45:33.822] message:No resources found.
I0111 23:45:33.822] Error from server (NotFound): namespaces "my-namespace" not found
I0111 23:45:33.822] has: not found
W0111 23:45:33.923] I0111 23:45:33.921274   73116 controller_utils.go:1025] Waiting for caches to sync for resource quota controller
W0111 23:45:34.022] I0111 23:45:34.021655   73116 controller_utils.go:1032] Caches are synced for resource quota controller
I0111 23:45:34.122] test-cmd-util.sh:2665: Successful get namespaces {{range.items}}{{ if eq $id_field \"other\" }}found{{end}}{{end}}:: :
I0111 23:45:34.123] namespace/other created
I0111 23:45:34.128] test-cmd-util.sh:2669: Successful get namespaces/other {{.metadata.name}}: other
I0111 23:45:34.230] test-cmd-util.sh:2673: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:45:34.377] pod/valid-pod created
I0111 23:45:34.480] test-cmd-util.sh:2677: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0111 23:45:34.576] test-cmd-util.sh:2679: Successful get pods -n other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0111 23:45:34.664] Successful
I0111 23:45:34.665] message:error: a resource cannot be retrieved by name across all namespaces
I0111 23:45:34.665] has:a resource cannot be retrieved by name across all namespaces
I0111 23:45:34.766] test-cmd-util.sh:2686: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I0111 23:45:34.853] pod "valid-pod" force deleted
I0111 23:45:34.955] test-cmd-util.sh:2690: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:45:35.037] namespace "other" deleted
W0111 23:45:35.138] I0111 23:45:34.182157   73116 controller_utils.go:1025] Waiting for caches to sync for garbage collector controller
... skipping 114 lines ...
I0111 23:45:55.287] +++ command: run_client_config_tests
I0111 23:45:55.298] +++ [0111 23:45:55] Creating namespace namespace-1547250355-8588
I0111 23:45:55.377] namespace/namespace-1547250355-8588 created
I0111 23:45:55.451] Context "test" modified.
I0111 23:45:55.458] +++ [0111 23:45:55] Testing client config
I0111 23:45:55.538] Successful
I0111 23:45:55.538] message:error: stat missing: no such file or directory
I0111 23:45:55.539] has:missing: no such file or directory
I0111 23:45:55.615] Successful
I0111 23:45:55.615] message:error: stat missing: no such file or directory
I0111 23:45:55.615] has:missing: no such file or directory
I0111 23:45:55.690] Successful
I0111 23:45:55.690] message:error: stat missing: no such file or directory
I0111 23:45:55.690] has:missing: no such file or directory
I0111 23:45:55.769] Successful
I0111 23:45:55.769] message:Error in configuration: context was not found for specified context: missing-context
I0111 23:45:55.769] has:context was not found for specified context: missing-context
I0111 23:45:55.845] Successful
I0111 23:45:55.846] message:error: no server found for cluster "missing-cluster"
I0111 23:45:55.846] has:no server found for cluster "missing-cluster"
I0111 23:45:55.923] Successful
I0111 23:45:55.923] message:auth info "missing-user" does not exist
I0111 23:45:55.923] auth info "missing-user" does not exist
I0111 23:45:55.923] has:auth info "missing-user" does not exist
I0111 23:45:56.074] Successful
I0111 23:45:56.074] message:error: Error loading config file "/tmp/newconfig.yaml": no kind "Config" is registered for version "v-1"
I0111 23:45:56.075] has:Error loading config file
I0111 23:45:56.150] Successful
I0111 23:45:56.151] message:error: stat missing-config: no such file or directory
I0111 23:45:56.151] has:no such file or directory
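
[editor's note] The client-config cases above all reduce to how the kubeconfig loader surfaces filesystem and parse failures; the "stat missing: no such file or directory" text is the raw os.Stat error passed through. A minimal reproduction, assuming nothing beyond the standard library:

package main

import (
	"fmt"
	"os"
)

func main() {
	// kubeconfig loading checks each candidate file before parsing it; a
	// nonexistent --kubeconfig path yields exactly the error asserted above.
	if _, err := os.Stat("missing"); err != nil {
		fmt.Println("error:", err) // error: stat missing: no such file or directory
	}
}
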
I0111 23:45:56.164] +++ exit code: 0
I0111 23:45:56.208] Recording: run_service_accounts_tests
I0111 23:45:56.208] Running command: run_service_accounts_tests
I0111 23:45:56.228] 
I0111 23:45:56.229] +++ Running case: test-cmd.run_service_accounts_tests 
... skipping 76 lines ...
I0111 23:46:03.679]                 job-name=test-job
I0111 23:46:03.679]                 run=pi
I0111 23:46:03.679] Annotations:    cronjob.kubernetes.io/instantiate=manual
I0111 23:46:03.679] Parallelism:    1
I0111 23:46:03.679] Completions:    1
I0111 23:46:03.680] Start Time:     Fri, 11 Jan 2019 23:46:03 +0000
I0111 23:46:03.680] Pods Statuses:  1 Running / 0 Succeeded / 0 Failed
I0111 23:46:03.680] Pod Template:
I0111 23:46:03.680]   Labels:  controller-uid=0e63cc58-15fb-11e9-befc-0242ac110002
I0111 23:46:03.680]            job-name=test-job
I0111 23:46:03.680]            run=pi
I0111 23:46:03.680]   Containers:
I0111 23:46:03.680]    pi:
... skipping 304 lines ...
I0111 23:46:12.427]   selector:
I0111 23:46:12.427]     role: padawan
I0111 23:46:12.427]   sessionAffinity: None
I0111 23:46:12.427]   type: ClusterIP
I0111 23:46:12.427] status:
I0111 23:46:12.427]   loadBalancer: {}
W0111 23:46:12.527] error: you must specify resources by --filename when --local is set.
W0111 23:46:12.528] Example resource specifications include:
W0111 23:46:12.528]    '-f rsrc.yaml'
W0111 23:46:12.528]    '--filename=rsrc.json'
I0111 23:46:12.628] test-cmd-util.sh:2890: Successful get services redis-master {{range.spec.selector}}{{.}}:{{end}}: redis:master:backend:
I0111 23:46:12.771] test-cmd-util.sh:2897: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:
I0111 23:46:12.855] service "redis-master" deleted
... skipping 63 lines ...
I0111 23:46:18.144] +++ [0111 23:46:18] Creating namespace namespace-1547250378-28896
I0111 23:46:18.225] namespace/namespace-1547250378-28896 created
I0111 23:46:18.302] Context "test" modified.
I0111 23:46:18.308] +++ [0111 23:46:18] Testing kubectl(v1:daemonsets, v1:controllerrevisions)
I0111 23:46:18.408] test-cmd-util.sh:3682: Successful get daemonsets {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:46:18.565] daemonset.extensions/bind created
W0111 23:46:18.665] I0111 23:46:16.060015   73116 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547250375-31644", Name:"bind", UID:"15f030b0-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1283", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
... skipping 23 lines ...
W0111 23:46:18.673] I0111 23:46:18.568617   73116 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547250378-28896", Name:"bind", UID:"176ee751-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1331", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
... skipping 5 lines ...
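The FailedPlacement warnings above are expected in this suite: the test node 127.0.0.1 advertises a pod capacity of 0, so the DaemonSet controller can never place the bind pod on it. A minimal sketch of how to confirm that from a shell, assuming kubectl is pointed at the same test server (the node and DaemonSet names are taken from the log):

  # Pod capacity reported by the node; the events above imply this is 0.
  kubectl get node 127.0.0.1 -o go-template='{{.status.capacity.pods}}'
  # The FailedPlacement warnings also surface as events on the DaemonSet.
  kubectl describe daemonset bind | grep -i FailedPlacement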
I0111 23:46:18.776] test-cmd-util.sh:3686: Successful get controllerrevisions {{range.items}}{{.metadata.annotations}}:{{end}}: map[deprecated.daemonset.template.generation:1 kubectl.kubernetes.io/last-applied-configuration:{"apiVersion":"extensions/v1beta1","kind":"DaemonSet","metadata":{"annotations":{"kubernetes.io/change-cause":"kubectl apply --filename=hack/testdata/rollingupdate-daemonset.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true"},"name":"bind","namespace":"namespace-1547250378-28896"},"spec":{"template":{"metadata":{"labels":{"service":"bind"}},"spec":{"affinity":{"podAntiAffinity":{"requiredDuringSchedulingIgnoredDuringExecution":[{"labelSelector":{"matchExpressions":[{"key":"service","operator":"In","values":["bind"]}]},"namespaces":[],"topologyKey":"kubernetes.io/hostname"}]}},"containers":[{"image":"k8s.gcr.io/pause:2.0","name":"kubernetes-pause"}]}},"updateStrategy":{"rollingUpdate":{"maxUnavailable":"10%"},"type":"RollingUpdate"}}}
I0111 23:46:18.777]  kubernetes.io/change-cause:kubectl apply --filename=hack/testdata/rollingupdate-daemonset.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true]:
I0111 23:46:18.779] daemonset.extensions/bind skipped rollback (current template already matches revision 1)
I0111 23:46:18.881] test-cmd-util.sh:3689: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
I0111 23:46:18.971] test-cmd-util.sh:3690: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
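Each Successful get line above is a test-cmd-util.sh assertion that compares kubectl's go-template output against an expected string. A sketch of the same two checks run by hand, assuming the bind DaemonSet from the log still exists:

  # First container image of every daemonset, colon-separated (expected: k8s.gcr.io/pause:2.0:).
  kubectl get daemonsets -o go-template='{{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}'
  # Container count in the pod template (expected: 1).
  kubectl get daemonsets -o go-template='{{range.items}}{{(len .spec.template.spec.containers)}}{{end}}'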
I0111 23:46:19.120] daemonset.extensions/bind configured
... skipping 18 lines ...
I0111 23:46:19.776] test-cmd-util.sh:3700: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I0111 23:46:19.864] test-cmd-util.sh:3701: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
I0111 23:46:19.962] daemonset.extensions/bind rolled back
I0111 23:46:20.055] test-cmd-util.sh:3704: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
I0111 23:46:20.141] test-cmd-util.sh:3705: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I0111 23:46:20.241] Successful
I0111 23:46:20.242] message:error: unable to find specified revision 1000000 in history
I0111 23:46:20.242] has:unable to find specified revision
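The unable to find specified revision message comes from requesting a rollback to a history entry that was never recorded. A sketch, assuming the bind DaemonSet from the log:

  # Revision 1000000 is not in the controllerrevision history, so this fails:
  kubectl rollout undo daemonset/bind --to-revision=1000000
  # The revisions that do exist can be listed first:
  kubectl rollout history daemonset/bind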
I0111 23:46:20.336] test-cmd-util.sh:3709: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
I0111 23:46:20.422] test-cmd-util.sh:3710: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I0111 23:46:20.523] daemonset.extensions/bind rolled back
I0111 23:46:20.625] test-cmd-util.sh:3713: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:latest:
I0111 23:46:20.717] test-cmd-util.sh:3714: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
... skipping 13 lines ...
I0111 23:46:22.247] test-cmd-util.sh:3012: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:46:22.392] replicationcontroller/frontend created
I0111 23:46:22.479] replicationcontroller "frontend" deleted
I0111 23:46:22.579] test-cmd-util.sh:3017: Successful get pods -l "name=frontend" {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:46:22.669] test-cmd-util.sh:3021: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:46:22.817] replicationcontroller/frontend created
W0111 23:46:22.917] I0111 23:46:19.123958   73116 event.go:221] Event(v1.ObjectReference{Kind:"DaemonSet", Namespace:"namespace-1547250378-28896", Name:"bind", UID:"176ee751-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1341", FieldPath:""}): type: 'Warning' reason: 'FailedPlacement' failed to place pod on "127.0.0.1": Node didn't have enough resource: pods, requested: 1, used: 0, capacity: 0
... skipping 17 lines ...
W0111 23:46:22.923] I0111 23:46:22.396810   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250381-25812", Name:"frontend", UID:"19b6f47f-15fb-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1378", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-4n2zf
W0111 23:46:22.923] I0111 23:46:22.399263   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250381-25812", Name:"frontend", UID:"19b6f47f-15fb-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1378", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-z8vfk
W0111 23:46:22.923] I0111 23:46:22.399637   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250381-25812", Name:"frontend", UID:"19b6f47f-15fb-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1378", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-b5thb
W0111 23:46:22.923] I0111 23:46:22.820217   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250381-25812", Name:"frontend", UID:"19f7a9b3-15fb-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1394", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-dkfxw
W0111 23:46:22.924] I0111 23:46:22.822670   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250381-25812", Name:"frontend", UID:"19f7a9b3-15fb-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1394", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-pv9rr
W0111 23:46:22.924] I0111 23:46:22.822824   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250381-25812", Name:"frontend", UID:"19f7a9b3-15fb-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1394", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-pxzlm
... skipping 3 lines ...
I0111 23:46:23.077] Namespace:    namespace-1547250381-25812
I0111 23:46:23.077] Selector:     app=guestbook,tier=frontend
I0111 23:46:23.077] Labels:       app=guestbook
I0111 23:46:23.077]               tier=frontend
I0111 23:46:23.077] Annotations:  <none>
I0111 23:46:23.077] Replicas:     3 current / 3 desired
I0111 23:46:23.077] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:23.077] Pod Template:
I0111 23:46:23.077]   Labels:  app=guestbook
I0111 23:46:23.077]            tier=frontend
I0111 23:46:23.077]   Containers:
I0111 23:46:23.078]    php-redis:
I0111 23:46:23.078]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
I0111 23:46:23.190] Namespace:    namespace-1547250381-25812
I0111 23:46:23.190] Selector:     app=guestbook,tier=frontend
I0111 23:46:23.190] Labels:       app=guestbook
I0111 23:46:23.190]               tier=frontend
I0111 23:46:23.190] Annotations:  <none>
I0111 23:46:23.191] Replicas:     3 current / 3 desired
I0111 23:46:23.191] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:23.191] Pod Template:
I0111 23:46:23.191]   Labels:  app=guestbook
I0111 23:46:23.191]            tier=frontend
I0111 23:46:23.191]   Containers:
I0111 23:46:23.191]    php-redis:
I0111 23:46:23.191]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 18 lines ...
I0111 23:46:23.299] Namespace:    namespace-1547250381-25812
I0111 23:46:23.299] Selector:     app=guestbook,tier=frontend
I0111 23:46:23.299] Labels:       app=guestbook
I0111 23:46:23.299]               tier=frontend
I0111 23:46:23.299] Annotations:  <none>
I0111 23:46:23.299] Replicas:     3 current / 3 desired
I0111 23:46:23.299] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:23.299] Pod Template:
I0111 23:46:23.299]   Labels:  app=guestbook
I0111 23:46:23.300]            tier=frontend
I0111 23:46:23.300]   Containers:
I0111 23:46:23.300]    php-redis:
I0111 23:46:23.300]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 12 lines ...
I0111 23:46:23.418] Namespace:    namespace-1547250381-25812
I0111 23:46:23.418] Selector:     app=guestbook,tier=frontend
I0111 23:46:23.418] Labels:       app=guestbook
I0111 23:46:23.418]               tier=frontend
I0111 23:46:23.418] Annotations:  <none>
I0111 23:46:23.418] Replicas:     3 current / 3 desired
I0111 23:46:23.418] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:23.418] Pod Template:
I0111 23:46:23.418]   Labels:  app=guestbook
I0111 23:46:23.418]            tier=frontend
I0111 23:46:23.418]   Containers:
I0111 23:46:23.419]    php-redis:
I0111 23:46:23.419]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 18 lines ...
I0111 23:46:23.571] Namespace:    namespace-1547250381-25812
I0111 23:46:23.571] Selector:     app=guestbook,tier=frontend
I0111 23:46:23.571] Labels:       app=guestbook
I0111 23:46:23.571]               tier=frontend
I0111 23:46:23.571] Annotations:  <none>
I0111 23:46:23.571] Replicas:     3 current / 3 desired
I0111 23:46:23.571] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:23.572] Pod Template:
I0111 23:46:23.572]   Labels:  app=guestbook
I0111 23:46:23.572]            tier=frontend
I0111 23:46:23.572]   Containers:
I0111 23:46:23.572]    php-redis:
I0111 23:46:23.572]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
I0111 23:46:23.682] Namespace:    namespace-1547250381-25812
I0111 23:46:23.682] Selector:     app=guestbook,tier=frontend
I0111 23:46:23.682] Labels:       app=guestbook
I0111 23:46:23.682]               tier=frontend
I0111 23:46:23.682] Annotations:  <none>
I0111 23:46:23.682] Replicas:     3 current / 3 desired
I0111 23:46:23.683] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:23.683] Pod Template:
I0111 23:46:23.683]   Labels:  app=guestbook
I0111 23:46:23.683]            tier=frontend
I0111 23:46:23.683]   Containers:
I0111 23:46:23.683]    php-redis:
I0111 23:46:23.683]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
I0111 23:46:23.791] Namespace:    namespace-1547250381-25812
I0111 23:46:23.791] Selector:     app=guestbook,tier=frontend
I0111 23:46:23.791] Labels:       app=guestbook
I0111 23:46:23.791]               tier=frontend
I0111 23:46:23.791] Annotations:  <none>
I0111 23:46:23.791] Replicas:     3 current / 3 desired
I0111 23:46:23.791] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:23.791] Pod Template:
I0111 23:46:23.792]   Labels:  app=guestbook
I0111 23:46:23.792]            tier=frontend
I0111 23:46:23.792]   Containers:
I0111 23:46:23.792]    php-redis:
I0111 23:46:23.792]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 11 lines ...
I0111 23:46:23.901] Namespace:    namespace-1547250381-25812
I0111 23:46:23.901] Selector:     app=guestbook,tier=frontend
I0111 23:46:23.901] Labels:       app=guestbook
I0111 23:46:23.902]               tier=frontend
I0111 23:46:23.902] Annotations:  <none>
I0111 23:46:23.902] Replicas:     3 current / 3 desired
I0111 23:46:23.902] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:23.902] Pod Template:
I0111 23:46:23.902]   Labels:  app=guestbook
I0111 23:46:23.902]            tier=frontend
I0111 23:46:23.902]   Containers:
I0111 23:46:23.902]    php-redis:
I0111 23:46:23.902]     Image:      gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
I0111 23:46:24.198] test-cmd-util.sh:3049: Successful get rc frontend {{.spec.replicas}}: 2
I0111 23:46:24.297] test-cmd-util.sh:3053: Successful get rc frontend {{.spec.replicas}}: 2
I0111 23:46:24.495] test-cmd-util.sh:3057: Successful get rc frontend {{.spec.replicas}}: 2
I0111 23:46:24.590] test-cmd-util.sh:3061: Successful get rc frontend {{.spec.replicas}}: 2
I0111 23:46:24.687] replicationcontroller/frontend scaled
W0111 23:46:24.788] I0111 23:46:24.101548   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250381-25812", Name:"frontend", UID:"19f7a9b3-15fb-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1405", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: frontend-dkfxw
W0111 23:46:24.788] error: Expected replicas to be 3, was 2
W0111 23:46:24.788] I0111 23:46:24.690546   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250381-25812", Name:"frontend", UID:"19f7a9b3-15fb-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1411", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-52f69
I0111 23:46:24.889] test-cmd-util.sh:3065: Successful get rc frontend {{.spec.replicas}}: 3
I0111 23:46:24.907] test-cmd-util.sh:3069: Successful get rc frontend {{.spec.replicas}}: 3
I0111 23:46:24.996] replicationcontroller/frontend scaled
W0111 23:46:25.096] I0111 23:46:25.000257   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250381-25812", Name:"frontend", UID:"19f7a9b3-15fb-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1417", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: frontend-52f69
I0111 23:46:25.197] test-cmd-util.sh:3073: Successful get rc frontend {{.spec.replicas}}: 2
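The Expected replicas to be 3, was 2 error above is kubectl scale's client-side precondition, not a server failure: --current-replicas must match the live object before the resize is attempted. A sketch, assuming the frontend replication controller from the log:

  # Refused: the stated current count (3) does not match the live object (2).
  kubectl scale rc frontend --current-replicas=3 --replicas=2
  # Accepted once the precondition matches:
  kubectl scale rc frontend --current-replicas=2 --replicas=3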
... skipping 61 lines ...
I0111 23:46:28.969] service "frontend" deleted
I0111 23:46:28.975] service "frontend-2" deleted
I0111 23:46:28.982] service "frontend-3" deleted
I0111 23:46:28.987] service "frontend-4" deleted
I0111 23:46:28.993] service "frontend-5" deleted
I0111 23:46:29.099] Successful
I0111 23:46:29.099] message:error: cannot expose a { Node}
I0111 23:46:29.099] has:cannot expose
I0111 23:46:29.195] Successful
I0111 23:46:29.195] message:The Service "invalid-large-service-name-that-has-more-than-sixty-three-characters" is invalid: metadata.name: Invalid value: "invalid-large-service-name-that-has-more-than-sixty-three-characters": must be no more than 63 characters
I0111 23:46:29.195] has:metadata.name: Invalid value
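The metadata.name rejection above is the DNS label limit: a Service name must be 63 characters or fewer. A sketch of the failing call, assuming an exposable replication controller named frontend (the long name is the one from the log):

  # Rejected by API validation with the Invalid value error shown above.
  kubectl expose rc frontend --port=80 \
    --name=invalid-large-service-name-that-has-more-than-sixty-three-characters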
I0111 23:46:29.296] Successful
I0111 23:46:29.296] message:service/kubernetes-serve-hostname-testing-sixty-three-characters-in-len exposed
... skipping 30 lines ...
W0111 23:46:31.650] I0111 23:46:31.168338   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250381-25812", Name:"frontend", UID:"1ef1709f-15fb-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1634", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-ppxv5
W0111 23:46:31.651] I0111 23:46:31.171101   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250381-25812", Name:"frontend", UID:"1ef1709f-15fb-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1634", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-zd6nv
W0111 23:46:31.651] I0111 23:46:31.171208   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1547250381-25812", Name:"frontend", UID:"1ef1709f-15fb-11e9-befc-0242ac110002", APIVersion:"v1", ResourceVersion:"1634", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-xlt7w
I0111 23:46:31.751] horizontalpodautoscaler.autoscaling/frontend autoscaled
I0111 23:46:31.755] test-cmd-util.sh:3213: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 2 3 80
I0111 23:46:31.843] horizontalpodautoscaler.autoscaling "frontend" deleted
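The 2 3 80 assertion above corresponds to an autoscale call with explicit bounds and a CPU target; the Error block that follows shows the usage failure when the required --max flag is omitted. A sketch, assuming the frontend replication controller from the log:

  # Creates an HPA with minReplicas=2, maxReplicas=3, targetCPUUtilizationPercentage=80.
  kubectl autoscale rc frontend --min=2 --max=3 --cpu-percent=80
  # Omitting --max reproduces the 'required flag(s) "max" not set' error below.
  kubectl autoscale rc frontend --min=2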
W0111 23:46:31.943] Error: required flag(s) "max" not set
W0111 23:46:31.943] 
W0111 23:46:31.943] 
W0111 23:46:31.944] Examples:
W0111 23:46:31.944]   # Auto scale a deployment "foo", with the number of pods between 2 and 10, no target CPU utilization specified so a default autoscaling policy will be used:
W0111 23:46:31.944]   kubectl autoscale deployment foo --min=2 --max=10
W0111 23:46:31.944]   
... skipping 69 lines ...
I0111 23:46:32.211]       dnsPolicy: ClusterFirst
I0111 23:46:32.211]       restartPolicy: Always
I0111 23:46:32.212]       schedulerName: default-scheduler
I0111 23:46:32.212]       securityContext: {}
I0111 23:46:32.212]       terminationGracePeriodSeconds: 0
I0111 23:46:32.212] status: {}
W0111 23:46:32.312] Error from server (NotFound): deployments.extensions "nginx-deployment-resources" not found
I0111 23:46:32.456] deployment.extensions/nginx-deployment-resources created
W0111 23:46:32.557] I0111 23:46:32.459917   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources", UID:"1fb66add-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1654", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-resources-57c6b5597b to 3
W0111 23:46:32.557] I0111 23:46:32.462625   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources-57c6b5597b", UID:"1fb6f932-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1655", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-57c6b5597b-dd2mm
W0111 23:46:32.557] I0111 23:46:32.464882   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources-57c6b5597b", UID:"1fb6f932-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1655", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-57c6b5597b-wqzfr
W0111 23:46:32.558] I0111 23:46:32.465310   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources-57c6b5597b", UID:"1fb6f932-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1655", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-57c6b5597b-v7l6p
I0111 23:46:32.658] test-cmd-util.sh:3228: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx-deployment-resources:
... skipping 7 lines ...
I0111 23:46:33.451] test-cmd-util.sh:3240: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.limits.cpu}}:{{end}}: 100m:
I0111 23:46:33.546] deployment.extensions/nginx-deployment-resources resource requirements updated
W0111 23:46:33.647] I0111 23:46:32.857288   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources", UID:"1fb66add-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1668", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-resources-79bfbb6584 to 1
W0111 23:46:33.648] I0111 23:46:32.859986   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources-79bfbb6584", UID:"1ff3ae85-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1669", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-79bfbb6584-zx9hz
W0111 23:46:33.648] I0111 23:46:32.863021   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources", UID:"1fb66add-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1668", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-resources-57c6b5597b to 2
W0111 23:46:33.649] I0111 23:46:32.866989   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources-57c6b5597b", UID:"1fb6f932-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1675", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-resources-57c6b5597b-dd2mm
W0111 23:46:33.649] E0111 23:46:32.868451   73116 replica_set.go:450] Sync "namespace-1547250381-25812/nginx-deployment-resources-79bfbb6584" failed with Operation cannot be fulfilled on replicasets.apps "nginx-deployment-resources-79bfbb6584": the object has been modified; please apply your changes to the latest version and try again
W0111 23:46:33.649] I0111 23:46:32.868935   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources", UID:"1fb66add-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1670", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-resources-79bfbb6584 to 2
W0111 23:46:33.650] I0111 23:46:32.877153   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources-79bfbb6584", UID:"1ff3ae85-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1679", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-79bfbb6584-w5wnd
W0111 23:46:33.650] error: unable to find container named redis
W0111 23:46:33.650] I0111 23:46:33.255801   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources", UID:"1fb66add-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1695", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-resources-57c6b5597b to 0
W0111 23:46:33.650] I0111 23:46:33.259394   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources-57c6b5597b", UID:"1fb6f932-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1699", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-resources-57c6b5597b-wqzfr
W0111 23:46:33.651] I0111 23:46:33.259627   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources-57c6b5597b", UID:"1fb6f932-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1699", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-resources-57c6b5597b-v7l6p
W0111 23:46:33.651] I0111 23:46:33.262418   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources", UID:"1fb66add-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1698", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-resources-775fc4497d to 2
W0111 23:46:33.652] I0111 23:46:33.265474   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources-775fc4497d", UID:"202f785e-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1706", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-775fc4497d-8bgw2
W0111 23:46:33.652] I0111 23:46:33.313520   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources-775fc4497d", UID:"202f785e-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1706", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-775fc4497d-gm5b5
... skipping 77 lines ...
I0111 23:46:33.970]     reason: NewReplicaSetCreated
I0111 23:46:33.971]     status: "True"
I0111 23:46:33.971]     type: Progressing
I0111 23:46:33.971]   observedGeneration: 4
I0111 23:46:33.971]   replicas: 2
I0111 23:46:33.971]   unavailableReplicas: 4
W0111 23:46:34.072] error: you must specify resources by --filename when --local is set.
W0111 23:46:34.072] Example resource specifications include:
W0111 23:46:34.072]    '-f rsrc.yaml'
W0111 23:46:34.072]    '--filename=rsrc.json'
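As the error text says, --local keeps kubectl entirely client-side, so the object to edit has to come from a file instead of the server. A sketch, assuming a deployment manifest at rsrc.yaml (the filename suggested in the message above):

  # Fails: with --local and no --filename there is nothing to operate on.
  kubectl set resources --local --limits=cpu=200m -o yaml
  # Works: rewrite the on-disk spec and print it without contacting the server.
  kubectl set resources -f rsrc.yaml --local --limits=cpu=200m -o yaml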
I0111 23:46:34.173] test-cmd-util.sh:3249: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).resources.limits.cpu}}:{{end}}: 200m:
I0111 23:46:34.256] test-cmd-util.sh:3250: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.limits.cpu}}:{{end}}: 300m:
I0111 23:46:34.365] test-cmd-util.sh:3251: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.requests.cpu}}:{{end}}: 300m:
... skipping 9 lines ...
I0111 23:46:34.708] namespace/namespace-1547250394-21691 created
I0111 23:46:34.789] Context "test" modified.
I0111 23:46:34.798] +++ [0111 23:46:34] Testing deployments
I0111 23:46:34.888] deployment.apps/test-nginx-extensions created
W0111 23:46:34.989] I0111 23:46:34.313709   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources-58744d6979", UID:"205d7488-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1724", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-58744d6979-6qdcf
W0111 23:46:34.989] I0111 23:46:34.413545   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250381-25812", Name:"nginx-deployment-resources-58744d6979", UID:"205d7488-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1724", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-58744d6979-28xqb
W0111 23:46:34.990] E0111 23:46:34.512521   73116 replica_set.go:450] Sync "namespace-1547250381-25812/nginx-deployment-resources-79bfbb6584" failed with Operation cannot be fulfilled on replicasets.apps "nginx-deployment-resources-79bfbb6584": StorageError: invalid object, Code: 4, Key: /registry/replicasets/namespace-1547250381-25812/nginx-deployment-resources-79bfbb6584, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 1ff3ae85-15fb-11e9-befc-0242ac110002, UID in object meta: 
W0111 23:46:34.990] E0111 23:46:34.662197   73116 replica_set.go:450] Sync "namespace-1547250381-25812/nginx-deployment-resources-58744d6979" failed with replicasets.apps "nginx-deployment-resources-58744d6979" not found
W0111 23:46:34.990] I0111 23:46:34.891329   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250394-21691", Name:"test-nginx-extensions", UID:"2129838f-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1757", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set test-nginx-extensions-7f469cb69d to 1
W0111 23:46:34.991] I0111 23:46:34.895599   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250394-21691", Name:"test-nginx-extensions-7f469cb69d", UID:"212a0920-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1758", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test-nginx-extensions-7f469cb69d-xktgn
I0111 23:46:35.091] test-cmd-util.sh:3268: Successful get deploy test-nginx-extensions {{(index .spec.template.spec.containers 0).name}}: nginx
I0111 23:46:35.093] Successful
I0111 23:46:35.093] message:10
I0111 23:46:35.093] has not:2
... skipping 25 lines ...
I0111 23:46:36.028]                 pod-template-hash=1594316396
I0111 23:46:36.029] Annotations:    deployment.kubernetes.io/desired-replicas=1
I0111 23:46:36.029]                 deployment.kubernetes.io/max-replicas=2
I0111 23:46:36.029]                 deployment.kubernetes.io/revision=1
I0111 23:46:36.029] Controlled By:  Deployment/test-nginx-apps
I0111 23:46:36.029] Replicas:       1 current / 1 desired
I0111 23:46:36.029] Pods Status:    0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:36.029] Pod Template:
I0111 23:46:36.029]   Labels:  app=test-nginx-apps
I0111 23:46:36.029]            pod-template-hash=1594316396
I0111 23:46:36.029]   Containers:
I0111 23:46:36.029]    nginx:
I0111 23:46:36.030]     Image:        k8s.gcr.io/nginx:test-cmd
... skipping 81 lines ...
W0111 23:46:40.049] I0111 23:46:39.676904   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250394-21691", Name:"nginx", UID:"23d5e946-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1880", FieldPath:""}): type: 'Warning' reason: 'DeploymentRollbackTemplateUnchanged' The rollback revision contains the same template as current deployment "nginx"
W0111 23:46:40.049] Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
W0111 23:46:40.050] I0111 23:46:39.951516   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250394-21691", Name:"nginx", UID:"23d5e946-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1884", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-6598b6dfdb to 1
W0111 23:46:40.050] I0111 23:46:39.954593   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250394-21691", Name:"nginx-6598b6dfdb", UID:"242e2de5-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1885", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-6598b6dfdb-f7vph
W0111 23:46:40.050] I0111 23:46:39.957525   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250394-21691", Name:"nginx", UID:"23d5e946-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1884", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-794c6b99b4 to 2
W0111 23:46:40.050] I0111 23:46:39.962667   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250394-21691", Name:"nginx-794c6b99b4", UID:"23d669ac-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1890", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-794c6b99b4-5qq2m
W0111 23:46:40.051] E0111 23:46:39.963161   73116 replica_set.go:450] Sync "namespace-1547250394-21691/nginx-6598b6dfdb" failed with Operation cannot be fulfilled on replicasets.apps "nginx-6598b6dfdb": the object has been modified; please apply your changes to the latest version and try again
W0111 23:46:40.051] I0111 23:46:39.963300   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250394-21691", Name:"nginx", UID:"23d5e946-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1888", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-6598b6dfdb to 2
W0111 23:46:40.051] I0111 23:46:39.966782   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250394-21691", Name:"nginx-6598b6dfdb", UID:"242e2de5-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1894", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-6598b6dfdb-cbk2k
I0111 23:46:40.152] test-cmd-util.sh:3367: Successful get deployment.extensions {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
I0111 23:46:40.161]     Image:	k8s.gcr.io/nginx:test-cmd
I0111 23:46:40.261] test-cmd-util.sh:3370: Successful get deployment.extensions {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
I0111 23:46:40.394] deployment.extensions/nginx
... skipping 3 lines ...
I0111 23:46:41.692] test-cmd-util.sh:3377: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I0111 23:46:41.801] deployment.extensions/nginx
W0111 23:46:41.902] I0111 23:46:41.590924   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250394-21691", Name:"nginx", UID:"23d5e946-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1915", FieldPath:""}): type: 'Warning' reason: 'DeploymentRollbackRevisionNotFound' Unable to find the revision to rollback to.
W0111 23:46:41.902] I0111 23:46:41.795129   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250394-21691", Name:"nginx", UID:"23d5e946-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1919", FieldPath:""}): type: 'Normal' reason: 'DeploymentRollback' Rolled back deployment "nginx" to revision 2
I0111 23:46:42.909] test-cmd-util.sh:3381: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
I0111 23:46:43.003] deployment.extensions/nginx paused
W0111 23:46:43.104] error: you cannot rollback a paused deployment; resume it first with 'kubectl rollout resume deployment/nginx' and try again
I0111 23:46:43.205] deployment.extensions/nginx resumed
W0111 23:46:43.314] I0111 23:46:43.313795   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250394-21691", Name:"nginx", UID:"23d5e946-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1933", FieldPath:""}): type: 'Normal' reason: 'DeploymentRollback' Rolled back deployment "nginx" to revision 3
I0111 23:46:43.415] deployment.extensions/nginx
I0111 23:46:43.604]     deployment.kubernetes.io/revision-history: 1,3
W0111 23:46:43.705] error: desired revision (3) is different from the running revision (5)
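The two errors above are rollout guardrails: a paused deployment refuses undo until it is resumed, and a rollback target is checked against the deployment's recorded revisions. A sketch of the pause/resume sequence, assuming the nginx deployment from the log:

  kubectl rollout pause deployment/nginx
  # Refused while paused, with the 'resume it first' error shown above:
  kubectl rollout undo deployment/nginx
  kubectl rollout resume deployment/nginx
  # Allowed again after resuming; the target revision must exist in history:
  kubectl rollout undo deployment/nginx --to-revision=2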
I0111 23:46:43.856] deployment.extensions/nginx2 created
I0111 23:46:43.950] deployment.extensions "nginx2" deleted
I0111 23:46:44.044] deployment.extensions "nginx" deleted
I0111 23:46:44.146] test-cmd-util.sh:3403: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:46:44.293] deployment.extensions/nginx-deployment created
W0111 23:46:44.393] I0111 23:46:43.859121   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250394-21691", Name:"nginx2", UID:"2681f07e-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1939", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx2-5d58d7d8d4 to 3
... skipping 22 lines ...
W0111 23:46:46.078] I0111 23:46:44.699076   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250394-21691", Name:"nginx-deployment", UID:"26c4954b-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1986", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-78d7b4bff9 to 1
W0111 23:46:46.079] I0111 23:46:44.701522   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250394-21691", Name:"nginx-deployment-78d7b4bff9", UID:"2702a89b-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1987", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-78d7b4bff9-l9jsf
W0111 23:46:46.079] I0111 23:46:44.703678   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250394-21691", Name:"nginx-deployment", UID:"26c4954b-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1986", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-84765bf7f9 to 2
W0111 23:46:46.080] I0111 23:46:44.707256   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250394-21691", Name:"nginx-deployment-84765bf7f9", UID:"26c51a8a-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1992", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-84765bf7f9-g8lpb
W0111 23:46:46.080] I0111 23:46:44.711400   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250394-21691", Name:"nginx-deployment", UID:"26c4954b-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1990", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-78d7b4bff9 to 2
W0111 23:46:46.080] I0111 23:46:44.713443   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250394-21691", Name:"nginx-deployment-78d7b4bff9", UID:"2702a89b-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2000", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-78d7b4bff9-6m8m5
W0111 23:46:46.080] error: unable to find container named "redis"
W0111 23:46:46.081] I0111 23:46:45.987014   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250394-21691", Name:"nginx-deployment", UID:"26c4954b-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2019", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-78d7b4bff9 to 0
W0111 23:46:46.081] I0111 23:46:45.991515   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250394-21691", Name:"nginx-deployment-78d7b4bff9", UID:"2702a89b-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2023", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-78d7b4bff9-6m8m5
W0111 23:46:46.082] I0111 23:46:45.991622   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250394-21691", Name:"nginx-deployment-78d7b4bff9", UID:"2702a89b-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2023", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-78d7b4bff9-l9jsf
W0111 23:46:46.082] I0111 23:46:45.993335   73116 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1547250394-21691", Name:"nginx-deployment", UID:"26c4954b-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2021", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-786f9d445c to 2
W0111 23:46:46.082] I0111 23:46:45.997117   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250394-21691", Name:"nginx-deployment-786f9d445c", UID:"27c638bb-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2029", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-786f9d445c-fwq6j
W0111 23:46:46.083] I0111 23:46:45.998929   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250394-21691", Name:"nginx-deployment-786f9d445c", UID:"27c638bb-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2029", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-786f9d445c-gj49k
... skipping 43 lines ...
I0111 23:46:48.393] deployment.extensions/nginx-deployment env updated
I0111 23:46:48.491] deployment.extensions/nginx-deployment env updated
W0111 23:46:48.592] I0111 23:46:48.446241   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250394-21691", Name:"nginx-deployment-cdbc49cff", UID:"28bbc0c0-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2149", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-cdbc49cff-8f5f9
W0111 23:46:48.593] I0111 23:46:48.494656   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250394-21691", Name:"nginx-deployment-cdbc49cff", UID:"28bbc0c0-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2149", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-cdbc49cff-4dnnq
I0111 23:46:48.693] deployment.extensions/nginx-deployment env updated
I0111 23:46:48.694] deployment.extensions "nginx-deployment" deleted
W0111 23:46:48.794] E0111 23:46:48.792982   73116 replica_set.go:450] Sync "namespace-1547250394-21691/nginx-deployment-67c9c8994" failed with Operation cannot be fulfilled on replicasets.apps "nginx-deployment-67c9c8994": StorageError: invalid object, Code: 4, Key: /registry/replicasets/namespace-1547250394-21691/nginx-deployment-67c9c8994, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 291f0ea6-15fb-11e9-befc-0242ac110002, UID in object meta: 
W0111 23:46:48.893] E0111 23:46:48.892397   73116 replica_set.go:450] Sync "namespace-1547250394-21691/nginx-deployment-5fcdc7cb99" failed with replicasets.apps "nginx-deployment-5fcdc7cb99" not found
I0111 23:46:48.993] configmap "test-set-env-config" deleted
I0111 23:46:48.993] secret "test-set-env-secret" deleted
I0111 23:46:48.994] +++ exit code: 0
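The set-env block above ends cleanly: the `kubectl set env` updates against deployment/nginx-deployment succeed, and the lone failure is the intentional one near the top of the block, where a nonexistent container is targeted. A minimal sketch of the two invocation shapes presumably under test (the KEY=value pair is illustrative; the resource and container names come from the log):

  # Update an env var across all containers in the pod template
  kubectl set env deployment/nginx-deployment KEY=value

  # Targeting a container that is not in the template fails
  kubectl set env deployment/nginx-deployment --containers=redis KEY=value
  # error: unable to find container named "redis"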
I0111 23:46:48.994] Recording: run_rs_tests
I0111 23:46:48.994] Running command: run_rs_tests
I0111 23:46:49.012] 
... skipping 5 lines ...
I0111 23:46:49.191] Context "test" modified.
I0111 23:46:49.198] +++ [0111 23:46:49] Testing kubectl(v1:replicasets)
I0111 23:46:49.294] test-cmd-util.sh:3486: Successful get rs {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:46:49.454] replicaset.extensions/frontend created
I0111 23:46:49.468] +++ [0111 23:46:49] Deleting rs
I0111 23:46:49.558] replicaset.extensions "frontend" deleted
W0111 23:46:49.659] E0111 23:46:49.042876   73116 replica_set.go:450] Sync "namespace-1547250394-21691/nginx-deployment-cdbc49cff" failed with replicasets.apps "nginx-deployment-cdbc49cff" not found
W0111 23:46:49.659] E0111 23:46:49.092768   73116 replica_set.go:450] Sync "namespace-1547250394-21691/nginx-deployment-7b6cf544d6" failed with replicasets.apps "nginx-deployment-7b6cf544d6" not found
W0111 23:46:49.659] E0111 23:46:49.142680   73116 replica_set.go:450] Sync "namespace-1547250394-21691/nginx-deployment-f7b94bfb8" failed with replicasets.apps "nginx-deployment-f7b94bfb8" not found
W0111 23:46:49.659] I0111 23:46:49.458588   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250409-32613", Name:"frontend", UID:"29d80e92-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2187", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-nfq4t
W0111 23:46:49.660] I0111 23:46:49.461126   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250409-32613", Name:"frontend", UID:"29d80e92-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2187", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-zpnrh
W0111 23:46:49.660] I0111 23:46:49.461195   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250409-32613", Name:"frontend", UID:"29d80e92-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2187", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-r42b6
W0111 23:46:49.660] E0111 23:46:49.642764   73116 replica_set.go:450] Sync "namespace-1547250409-32613/frontend" failed with replicasets.apps "frontend" not found
I0111 23:46:49.760] test-cmd-util.sh:3492: Successful get pods -l "tier=frontend" {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:46:49.767] test-cmd-util.sh:3496: Successful get rs {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:46:49.920] replicaset.extensions/frontend created
W0111 23:46:50.021] I0111 23:46:49.923108   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250409-32613", Name:"frontend", UID:"2a1f4449-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2203", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-vh24q
W0111 23:46:50.021] I0111 23:46:49.925958   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250409-32613", Name:"frontend", UID:"2a1f4449-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2203", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-27zlz
W0111 23:46:50.022] I0111 23:46:49.926769   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250409-32613", Name:"frontend", UID:"2a1f4449-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2203", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-pk5kq
I0111 23:46:50.122] test-cmd-util.sh:3500: Successful get pods -l "tier=frontend" {{range.items}}{{(index .spec.containers 0).name}}:{{end}}: php-redis:php-redis:php-redis:
I0111 23:46:50.123] +++ [0111 23:46:50] Deleting rs
I0111 23:46:50.127] replicaset.extensions "frontend" deleted
W0111 23:46:50.227] E0111 23:46:50.192884   73116 replica_set.go:450] Sync "namespace-1547250409-32613/frontend" failed with replicasets.apps "frontend" not found
I0111 23:46:50.328] test-cmd-util.sh:3504: Successful get rs {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:46:50.344] test-cmd-util.sh:3506: Successful get pods -l "tier=frontend" {{range.items}}{{(index .spec.containers 0).name}}:{{end}}: php-redis:php-redis:php-redis:
I0111 23:46:50.438] pod "frontend-27zlz" deleted
I0111 23:46:50.444] pod "frontend-pk5kq" deleted
I0111 23:46:50.449] pod "frontend-vh24q" deleted
I0111 23:46:50.558] test-cmd-util.sh:3509: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: 
... skipping 8 lines ...
I0111 23:46:51.082] Namespace:    namespace-1547250409-32613
I0111 23:46:51.083] Selector:     app=guestbook,tier=frontend
I0111 23:46:51.083] Labels:       app=guestbook
I0111 23:46:51.083]               tier=frontend
I0111 23:46:51.083] Annotations:  <none>
I0111 23:46:51.083] Replicas:     3 current / 3 desired
I0111 23:46:51.083] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:51.083] Pod Template:
I0111 23:46:51.083]   Labels:  app=guestbook
I0111 23:46:51.083]            tier=frontend
I0111 23:46:51.084]   Containers:
I0111 23:46:51.084]    php-redis:
I0111 23:46:51.084]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 17 lines ...
I0111 23:46:51.202] Namespace:    namespace-1547250409-32613
I0111 23:46:51.202] Selector:     app=guestbook,tier=frontend
I0111 23:46:51.203] Labels:       app=guestbook
I0111 23:46:51.203]               tier=frontend
I0111 23:46:51.203] Annotations:  <none>
I0111 23:46:51.203] Replicas:     3 current / 3 desired
I0111 23:46:51.203] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:51.203] Pod Template:
I0111 23:46:51.203]   Labels:  app=guestbook
I0111 23:46:51.203]            tier=frontend
I0111 23:46:51.204]   Containers:
I0111 23:46:51.204]    php-redis:
I0111 23:46:51.204]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 18 lines ...
I0111 23:46:51.318] Namespace:    namespace-1547250409-32613
I0111 23:46:51.318] Selector:     app=guestbook,tier=frontend
I0111 23:46:51.318] Labels:       app=guestbook
I0111 23:46:51.318]               tier=frontend
I0111 23:46:51.318] Annotations:  <none>
I0111 23:46:51.318] Replicas:     3 current / 3 desired
I0111 23:46:51.318] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:51.318] Pod Template:
I0111 23:46:51.319]   Labels:  app=guestbook
I0111 23:46:51.319]            tier=frontend
I0111 23:46:51.319]   Containers:
I0111 23:46:51.319]    php-redis:
I0111 23:46:51.319]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 12 lines ...
I0111 23:46:51.438] Namespace:    namespace-1547250409-32613
I0111 23:46:51.438] Selector:     app=guestbook,tier=frontend
I0111 23:46:51.438] Labels:       app=guestbook
I0111 23:46:51.438]               tier=frontend
I0111 23:46:51.439] Annotations:  <none>
I0111 23:46:51.439] Replicas:     3 current / 3 desired
I0111 23:46:51.439] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:51.439] Pod Template:
I0111 23:46:51.439]   Labels:  app=guestbook
I0111 23:46:51.439]            tier=frontend
I0111 23:46:51.439]   Containers:
I0111 23:46:51.439]    php-redis:
I0111 23:46:51.439]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 18 lines ...
I0111 23:46:51.577] Namespace:    namespace-1547250409-32613
I0111 23:46:51.577] Selector:     app=guestbook,tier=frontend
I0111 23:46:51.578] Labels:       app=guestbook
I0111 23:46:51.578]               tier=frontend
I0111 23:46:51.578] Annotations:  <none>
I0111 23:46:51.578] Replicas:     3 current / 3 desired
I0111 23:46:51.578] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:51.578] Pod Template:
I0111 23:46:51.578]   Labels:  app=guestbook
I0111 23:46:51.578]            tier=frontend
I0111 23:46:51.578]   Containers:
I0111 23:46:51.578]    php-redis:
I0111 23:46:51.578]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 17 lines ...
I0111 23:46:51.696] Namespace:    namespace-1547250409-32613
I0111 23:46:51.696] Selector:     app=guestbook,tier=frontend
I0111 23:46:51.697] Labels:       app=guestbook
I0111 23:46:51.697]               tier=frontend
I0111 23:46:51.697] Annotations:  <none>
I0111 23:46:51.697] Replicas:     3 current / 3 desired
I0111 23:46:51.697] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:51.697] Pod Template:
I0111 23:46:51.697]   Labels:  app=guestbook
I0111 23:46:51.697]            tier=frontend
I0111 23:46:51.697]   Containers:
I0111 23:46:51.697]    php-redis:
I0111 23:46:51.697]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 17 lines ...
I0111 23:46:51.807] Namespace:    namespace-1547250409-32613
I0111 23:46:51.807] Selector:     app=guestbook,tier=frontend
I0111 23:46:51.807] Labels:       app=guestbook
I0111 23:46:51.807]               tier=frontend
I0111 23:46:51.807] Annotations:  <none>
I0111 23:46:51.807] Replicas:     3 current / 3 desired
I0111 23:46:51.807] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:51.807] Pod Template:
I0111 23:46:51.807]   Labels:  app=guestbook
I0111 23:46:51.807]            tier=frontend
I0111 23:46:51.807]   Containers:
I0111 23:46:51.808]    php-redis:
I0111 23:46:51.808]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 11 lines ...
I0111 23:46:51.921] Namespace:    namespace-1547250409-32613
I0111 23:46:51.922] Selector:     app=guestbook,tier=frontend
I0111 23:46:51.922] Labels:       app=guestbook
I0111 23:46:51.922]               tier=frontend
I0111 23:46:51.922] Annotations:  <none>
I0111 23:46:51.922] Replicas:     3 current / 3 desired
I0111 23:46:51.922] Pods Status:  0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I0111 23:46:51.922] Pod Template:
I0111 23:46:51.922]   Labels:  app=guestbook
I0111 23:46:51.922]            tier=frontend
I0111 23:46:51.922]   Containers:
I0111 23:46:51.923]    php-redis:
I0111 23:46:51.923]     Image:      gcr.io/google_samples/gb-frontend:v3
... skipping 183 lines ...
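The repeated Name/Namespace/Selector/Pod Template blocks above are successive `kubectl describe` runs against the frontend replicaset, exercised with and without event output. A sketch of the shapes involved (--show-events defaults to true and is the toggle being tested):

  kubectl describe rs frontend
  kubectl describe rs/frontend --show-events=false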
I0111 23:46:57.539] horizontalpodautoscaler.autoscaling/frontend autoscaled
I0111 23:46:57.643] test-cmd-util.sh:3629: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 2 3 80
I0111 23:46:57.730] horizontalpodautoscaler.autoscaling "frontend" deleted
W0111 23:46:57.830] I0111 23:46:57.052003   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250409-32613", Name:"frontend", UID:"2e5efede-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2416", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-cgkrb
W0111 23:46:57.831] I0111 23:46:57.054724   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250409-32613", Name:"frontend", UID:"2e5efede-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2416", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-gz9k8
W0111 23:46:57.831] I0111 23:46:57.054795   73116 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1547250409-32613", Name:"frontend", UID:"2e5efede-15fb-11e9-befc-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2416", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-726tk
W0111 23:46:57.831] Error: required flag(s) "max" not set
W0111 23:46:57.832] 
W0111 23:46:57.832] 
W0111 23:46:57.832] Examples:
W0111 23:46:57.832]   # Auto scale a deployment "foo", with the number of pods between 2 and 10, no target CPU utilization specified so a default autoscaling policy will be used:
W0111 23:46:57.832]   kubectl autoscale deployment foo --min=2 --max=10
W0111 23:46:57.832]   
... skipping 89 lines ...
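The `Error: required flag(s) "max" not set` above is `kubectl autoscale` rejecting an HPA with no upper bound; the successful path is asserted at test-cmd-util.sh:3629 as min=2, max=3, target CPU 80%. A sketch of both invocations (replicaset name from the log):

  # Succeeds: HPA with minReplicas=2, maxReplicas=3, 80% CPU target
  kubectl autoscale rs frontend --min=2 --max=3 --cpu-percent=80

  # Fails: --max is mandatory
  kubectl autoscale rs frontend --min=2
  # Error: required flag(s) "max" not set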
I0111 23:47:01.224] test-cmd-util.sh:3751: Successful get statefulset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
I0111 23:47:01.337] statefulset.apps/nginx rolled back
W0111 23:47:01.438] I0111 23:47:01.360522   73116 horizontal.go:366] Horizontal Pod Autoscaler has been deleted namespace-1547250381-25812/frontend
I0111 23:47:01.538] test-cmd-util.sh:3754: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx-slim:0.7:
I0111 23:47:01.548] test-cmd-util.sh:3755: Successful get statefulset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I0111 23:47:01.662] Successful
I0111 23:47:01.663] message:error: unable to find specified revision 1000000 in history
I0111 23:47:01.663] has:unable to find specified revision
I0111 23:47:01.763] test-cmd-util.sh:3759: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx-slim:0.7:
I0111 23:47:01.861] test-cmd-util.sh:3760: Successful get statefulset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I0111 23:47:01.974] statefulset.apps/nginx rolled back
I0111 23:47:02.078] test-cmd-util.sh:3763: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx-slim:0.8:
I0111 23:47:02.180] test-cmd-util.sh:3764: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/pause:2.0:
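The statefulset assertions above walk a rollback: the image flips back to nginx-slim:0.7 with a single container, a bogus revision is rejected, then rolling forward restores the two-container template (nginx-slim:0.8 plus pause:2.0). A minimal sketch of the rollout commands presumably behind this (1000000 is the revision the message shows; 1 is illustrative):

  # Roll back to a recorded revision
  kubectl rollout undo statefulset/nginx --to-revision=1

  # A revision absent from history is rejected
  kubectl rollout undo statefulset/nginx --to-revision=1000000
  # error: unable to find specified revision 1000000 in history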
... skipping 58 lines ...
I0111 23:47:04.137] Name:         mock
I0111 23:47:04.137] Namespace:    namespace-1547250423-18736
I0111 23:47:04.137] Selector:     app=mock
I0111 23:47:04.137] Labels:       app=mock
I0111 23:47:04.137] Annotations:  <none>
I0111 23:47:04.137] Replicas:     1 current / 1 desired
I0111 23:47:04.137] Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I0111 23:47:04.138] Pod Template:
I0111 23:47:04.138]   Labels:  app=mock
I0111 23:47:04.138]   Containers:
I0111 23:47:04.138]    mock-container:
I0111 23:47:04.138]     Image:        k8s.gcr.io/pause:2.0
I0111 23:47:04.138]     Port:         9949/TCP
... skipping 56 lines ...
I0111 23:47:06.498] Name:         mock
I0111 23:47:06.498] Namespace:    namespace-1547250423-18736
I0111 23:47:06.498] Selector:     app=mock
I0111 23:47:06.499] Labels:       app=mock
I0111 23:47:06.499] Annotations:  <none>
I0111 23:47:06.499] Replicas:     1 current / 1 desired
I0111 23:47:06.499] Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I0111 23:47:06.499] Pod Template:
I0111 23:47:06.499]   Labels:  app=mock
I0111 23:47:06.499]   Containers:
I0111 23:47:06.499]    mock-container:
I0111 23:47:06.499]     Image:        k8s.gcr.io/pause:2.0
I0111 23:47:06.500]     Port:         9949/TCP
... skipping 57 lines ...
I0111 23:47:08.911] Name:         mock
I0111 23:47:08.911] Namespace:    namespace-1547250423-18736
I0111 23:47:08.912] Selector:     app=mock
I0111 23:47:08.912] Labels:       app=mock
I0111 23:47:08.912] Annotations:  <none>
I0111 23:47:08.912] Replicas:     1 current / 1 desired
I0111 23:47:08.912] Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I0111 23:47:08.912] Pod Template:
I0111 23:47:08.912]   Labels:  app=mock
I0111 23:47:08.912]   Containers:
I0111 23:47:08.913]    mock-container:
I0111 23:47:08.913]     Image:        k8s.gcr.io/pause:2.0
I0111 23:47:08.913]     Port:         9949/TCP
... skipping 42 lines ...
I0111 23:47:11.080] Namespace:    namespace-1547250423-18736
I0111 23:47:11.080] Selector:     app=mock
I0111 23:47:11.080] Labels:       app=mock
I0111 23:47:11.080]               status=replaced
I0111 23:47:11.080] Annotations:  <none>
I0111 23:47:11.081] Replicas:     1 current / 1 desired
I0111 23:47:11.081] Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I0111 23:47:11.081] Pod Template:
I0111 23:47:11.081]   Labels:  app=mock
I0111 23:47:11.081]   Containers:
I0111 23:47:11.081]    mock-container:
I0111 23:47:11.081]     Image:        k8s.gcr.io/pause:2.0
I0111 23:47:11.081]     Port:         9949/TCP
... skipping 11 lines ...
I0111 23:47:11.088] Namespace:    namespace-1547250423-18736
I0111 23:47:11.088] Selector:     app=mock2
I0111 23:47:11.088] Labels:       app=mock2
I0111 23:47:11.088]               status=replaced
I0111 23:47:11.088] Annotations:  <none>
I0111 23:47:11.088] Replicas:     1 current / 1 desired
I0111 23:47:11.088] Pods Status:  0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I0111 23:47:11.088] Pod Template:
I0111 23:47:11.088]   Labels:  app=mock2
I0111 23:47:11.089]   Containers:
I0111 23:47:11.089]    mock-container:
I0111 23:47:11.089]     Image:        k8s.gcr.io/pause:2.0
I0111 23:47:11.089]     Port:         9949/TCP
... skipping 109 lines ...
I0111 23:47:16.525] persistentvolume/pv0001 created
I0111 23:47:16.629] test-cmd-util.sh:4479: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0001:
I0111 23:47:16.713] persistentvolume "pv0001" deleted
I0111 23:47:16.865] persistentvolume/pv0002 created
I0111 23:47:16.968] test-cmd-util.sh:4482: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0002:
I0111 23:47:17.049] persistentvolume "pv0002" deleted
W0111 23:47:17.150] E0111 23:47:16.867641   73116 pv_protection_controller.go:116] PV pv0002 failed with : Operation cannot be fulfilled on persistentvolumes "pv0002": the object has been modified; please apply your changes to the latest version and try again
W0111 23:47:17.205] E0111 23:47:17.205112   73116 pv_protection_controller.go:116] PV pv0003 failed with : Operation cannot be fulfilled on persistentvolumes "pv0003": the object has been modified; please apply your changes to the latest version and try again
I0111 23:47:17.306] persistentvolume/pv0003 created
I0111 23:47:17.306] test-cmd-util.sh:4485: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0003:
I0111 23:47:17.385] persistentvolume "pv0003" deleted
I0111 23:47:17.488] test-cmd-util.sh:4488: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: 
I0111 23:47:17.503] +++ exit code: 0
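The persistentvolume block above creates and deletes pv0001 through pv0003; the interleaved pv_protection_controller conflicts are benign update races against objects already being deleted. A hedged sketch of the kind of manifest such a test creates (capacity, access mode, and hostPath are illustrative, not taken from the test fixture):

  kubectl create -f - <<EOF
  apiVersion: v1
  kind: PersistentVolume
  metadata:
    name: pv0001
  spec:
    capacity:
      storage: 1Gi
    accessModes: ["ReadWriteOnce"]
    hostPath:
      path: /tmp/pv0001
  EOF
  kubectl delete pv pv0001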
I0111 23:47:17.548] Recording: run_persistent_volume_claims_tests
... skipping 459 lines ...
I0111 23:47:22.691] yes
I0111 23:47:22.692] has:the server doesn't have a resource type
I0111 23:47:22.821] Successful
I0111 23:47:22.821] message:yes
I0111 23:47:22.821] has:yes
I0111 23:47:22.949] Successful
I0111 23:47:22.950] message:error: --subresource can not be used with NonResourceURL
I0111 23:47:22.950] has:subresource can not be used with NonResourceURL
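The assertion above covers `kubectl auth can-i`, whose non-resource-URL form takes no subresource. A sketch (the URL and subresource are illustrative):

  # A subresource is fine on a resource check
  kubectl auth can-i get pods --subresource=log

  # ...but not on a non-resource URL
  kubectl auth can-i get /logs --subresource=log
  # error: --subresource can not be used with NonResourceURL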
I0111 23:47:23.083] Successful
I0111 23:47:23.217] Successful
I0111 23:47:23.217] message:yes
I0111 23:47:23.218] 0
I0111 23:47:23.218] has:0
... skipping 822 lines ...
I0111 23:47:51.500] message:node/127.0.0.1 already uncordoned (dry run)
I0111 23:47:51.500] has:already uncordoned
I0111 23:47:51.594] test-cmd-util.sh:4971: Successful get nodes 127.0.0.1 {{.spec.unschedulable}}: <no value>
I0111 23:47:51.677] node/127.0.0.1 labeled
I0111 23:47:51.776] test-cmd-util.sh:4976: Successful get nodes 127.0.0.1 {{.metadata.labels.test}}: label
I0111 23:47:51.851] Successful
I0111 23:47:51.851] message:error: cannot specify both a node name and a --selector option
I0111 23:47:51.851] See 'kubectl drain -h' for help and examples.
I0111 23:47:51.851] has:cannot specify both a node name
I0111 23:47:51.928] Successful
I0111 23:47:51.929] message:error: USAGE: cordon NODE [flags]
I0111 23:47:51.929] See 'kubectl cordon -h' for help and examples.
I0111 23:47:51.929] has:error\: USAGE\: cordon NODE
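Both failures above are argument validation: `kubectl drain` takes either a node name or a --selector, never both, and `kubectl cordon` requires a node argument. A sketch (node name from the log; the selector is illustrative):

  kubectl drain 127.0.0.1 --selector=env=test
  # error: cannot specify both a node name and a --selector option

  kubectl cordon
  # error: USAGE: cordon NODE [flags]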
I0111 23:47:52.013] node/127.0.0.1 already uncordoned
I0111 23:47:52.097] Successful
I0111 23:47:52.097] message:error: You must provide one or more resources by argument or filename.
I0111 23:47:52.097] Example resource specifications include:
I0111 23:47:52.097]    '-f rsrc.yaml'
I0111 23:47:52.097]    '--filename=rsrc.json'
I0111 23:47:52.097]    '<resource> <name>'
I0111 23:47:52.097]    '<resource>'
I0111 23:47:52.097] has:must provide one or more resources
... skipping 77 lines ...
I0111 23:47:52.569]   kubectl [flags] [options]
I0111 23:47:52.570] 
I0111 23:47:52.570] Use "kubectl <command> --help" for more information about a given command.
I0111 23:47:52.570] Use "kubectl options" for a list of global command-line options (applies to all commands).
I0111 23:47:52.570] has:plugin\s\+Runs a command-line plugin
I0111 23:47:52.642] Successful
I0111 23:47:52.642] message:error: no plugins installed.
I0111 23:47:52.642] has:no plugins installed
I0111 23:47:52.724] Successful
I0111 23:47:52.724] message:Runs a command-line plugin. 
I0111 23:47:52.724] 
I0111 23:47:52.724] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0111 23:47:52.724] 
I0111 23:47:52.724] Available Commands:
I0111 23:47:52.724]   echo        Echoes for test-cmd
I0111 23:47:52.724]   env         The plugin envs plugin
I0111 23:47:52.725]   error       The tremendous plugin that always fails!
I0111 23:47:52.725]   get         The wonderful new plugin-based get!
I0111 23:47:52.725]   tree        Plugin with a tree of commands
I0111 23:47:52.725] 
I0111 23:47:52.725] Usage:
I0111 23:47:52.725]   kubectl plugin NAME [options]
I0111 23:47:52.725] 
... skipping 5 lines ...
I0111 23:47:52.727] 
I0111 23:47:52.727] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0111 23:47:52.727] 
I0111 23:47:52.727] Available Commands:
I0111 23:47:52.727]   echo        Echoes for test-cmd
I0111 23:47:52.727]   env         The plugin envs plugin
I0111 23:47:52.727]   error       The tremendous plugin that always fails!
I0111 23:47:52.727]   get         The wonderful new plugin-based get!
I0111 23:47:52.727]   tree        Plugin with a tree of commands
I0111 23:47:52.727] 
I0111 23:47:52.727] Usage:
I0111 23:47:52.728]   kubectl plugin NAME [options]
I0111 23:47:52.728] 
... skipping 5 lines ...
I0111 23:47:52.728] 
I0111 23:47:52.729] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0111 23:47:52.729] 
I0111 23:47:52.729] Available Commands:
I0111 23:47:52.729]   echo        Echoes for test-cmd
I0111 23:47:52.729]   env         The plugin envs plugin
I0111 23:47:52.729]   error       The tremendous plugin that always fails!
I0111 23:47:52.729]   get         The wonderful new plugin-based get!
I0111 23:47:52.729]   tree        Plugin with a tree of commands
I0111 23:47:52.729] 
I0111 23:47:52.729] Usage:
I0111 23:47:52.729]   kubectl plugin NAME [options]
I0111 23:47:52.729] 
I0111 23:47:52.730] Use "kubectl <command> --help" for more information about a given command.
I0111 23:47:52.730] Use "kubectl options" for a list of global command-line options (applies to all commands).
I0111 23:47:52.730] has:error\s\+The tremendous plugin that always fails!
I0111 23:47:52.731] Successful
I0111 23:47:52.731] message:Runs a command-line plugin. 
I0111 23:47:52.731] 
I0111 23:47:52.731] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0111 23:47:52.731] 
I0111 23:47:52.731] Available Commands:
I0111 23:47:52.731]   echo        Echoes for test-cmd
I0111 23:47:52.731]   env         The plugin envs plugin
I0111 23:47:52.732]   error       The tremendous plugin that always fails!
I0111 23:47:52.732]   get         The wonderful new plugin-based get!
I0111 23:47:52.732]   tree        Plugin with a tree of commands
I0111 23:47:52.732] 
I0111 23:47:52.732] Usage:
I0111 23:47:52.732]   kubectl plugin NAME [options]
I0111 23:47:52.732] 
... skipping 5 lines ...
I0111 23:47:52.733] 
I0111 23:47:52.733] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0111 23:47:52.733] 
I0111 23:47:52.733] Available Commands:
I0111 23:47:52.733]   echo        Echoes for test-cmd
I0111 23:47:52.734]   env         The plugin envs plugin
I0111 23:47:52.734]   error       The tremendous plugin that always fails!
I0111 23:47:52.734]   get         The wonderful new plugin-based get!
I0111 23:47:52.734]   tree        Plugin with a tree of commands
I0111 23:47:52.734] 
I0111 23:47:52.734] Usage:
I0111 23:47:52.734]   kubectl plugin NAME [options]
I0111 23:47:52.734] 
... skipping 5 lines ...
I0111 23:47:52.736] 
I0111 23:47:52.736] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0111 23:47:52.736] 
I0111 23:47:52.736] Available Commands:
I0111 23:47:52.736]   echo        Echoes for test-cmd
I0111 23:47:52.736]   env         The plugin envs plugin
I0111 23:47:52.736]   error       The tremendous plugin that always fails!
I0111 23:47:52.736]   get         The wonderful new plugin-based get!
I0111 23:47:52.736]   tree        Plugin with a tree of commands
I0111 23:47:52.737] 
I0111 23:47:52.737] Usage:
I0111 23:47:52.737]   kubectl plugin NAME [options]
I0111 23:47:52.737] 
... skipping 5 lines ...
I0111 23:47:52.816] 
I0111 23:47:52.816] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0111 23:47:52.816] 
I0111 23:47:52.816] Available Commands:
I0111 23:47:52.817]   echo        Echoes for test-cmd
I0111 23:47:52.817]   env         The plugin envs plugin
I0111 23:47:52.817]   error       The tremendous plugin that always fails!
I0111 23:47:52.817]   get         The wonderful new plugin-based get!
I0111 23:47:52.817]   hello       The hello plugin
I0111 23:47:52.817]   tree        Plugin with a tree of commands
I0111 23:47:52.817] 
I0111 23:47:52.817] Usage:
I0111 23:47:52.818]   kubectl plugin NAME [options]
... skipping 6 lines ...
I0111 23:47:52.818] 
I0111 23:47:52.819] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0111 23:47:52.819] 
I0111 23:47:52.819] Available Commands:
I0111 23:47:52.819]   echo        Echoes for test-cmd
I0111 23:47:52.819]   env         The plugin envs plugin
I0111 23:47:52.819]   error       The tremendous plugin that always fails!
I0111 23:47:52.819]   get         The wonderful new plugin-based get!
I0111 23:47:52.819]   hello       The hello plugin
I0111 23:47:52.819]   tree        Plugin with a tree of commands
I0111 23:47:52.819] 
I0111 23:47:52.819] Usage:
I0111 23:47:52.820]   kubectl plugin NAME [options]
... skipping 6 lines ...
I0111 23:47:52.820] 
I0111 23:47:52.821] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0111 23:47:52.821] 
I0111 23:47:52.821] Available Commands:
I0111 23:47:52.821]   echo        Echoes for test-cmd
I0111 23:47:52.821]   env         The plugin envs plugin
I0111 23:47:52.821]   error       The tremendous plugin that always fails!
I0111 23:47:52.821]   get         The wonderful new plugin-based get!
I0111 23:47:52.821]   hello       The hello plugin
I0111 23:47:52.821]   tree        Plugin with a tree of commands
I0111 23:47:52.822] 
I0111 23:47:52.822] Usage:
I0111 23:47:52.822]   kubectl plugin NAME [options]
I0111 23:47:52.822] 
I0111 23:47:52.822] Use "kubectl <command> --help" for more information about a given command.
I0111 23:47:52.822] Use "kubectl options" for a list of global command-line options (applies to all commands).
I0111 23:47:52.822] has:error\s\+The tremendous plugin that always fails!
I0111 23:47:52.822] Successful
I0111 23:47:52.822] message:Runs a command-line plugin. 
I0111 23:47:52.823] 
I0111 23:47:52.823] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0111 23:47:52.823] 
I0111 23:47:52.823] Available Commands:
I0111 23:47:52.823]   echo        Echoes for test-cmd
I0111 23:47:52.823]   env         The plugin envs plugin
I0111 23:47:52.823]   error       The tremendous plugin that always fails!
I0111 23:47:52.823]   get         The wonderful new plugin-based get!
I0111 23:47:52.824]   hello       The hello plugin
I0111 23:47:52.824]   tree        Plugin with a tree of commands
I0111 23:47:52.824] 
I0111 23:47:52.824] Usage:
I0111 23:47:52.824]   kubectl plugin NAME [options]
... skipping 6 lines ...
I0111 23:47:52.825] 
I0111 23:47:52.826] Plugins are subcommands that are not part of the major command-line distribution and can even be provided by third-parties. Please refer to the documentation and examples for more information about how to install and write your own plugins.
I0111 23:47:52.826] 
I0111 23:47:52.826] Available Commands:
I0111 23:47:52.826]   echo        Echoes for test-cmd
I0111 23:47:52.826]   env         The plugin envs plugin
I0111 23:47:52.826]   error       The tremendous plugin that always fails!
I0111 23:47:52.826]   get         The wonderful new plugin-based get!
I0111 23:47:52.826]   hello       The hello plugin
I0111 23:47:52.826]   tree        Plugin with a tree of commands
I0111 23:47:52.826] 
I0111 23:47:52.826] Usage:
I0111 23:47:52.826]   kubectl plugin NAME [options]
... skipping 159 lines ...
I0111 23:47:53.074] #######
I0111 23:47:53.074] has:#hello#
I0111 23:47:53.157] Successful
I0111 23:47:53.157] message:This plugin works!
I0111 23:47:53.158] has:This plugin works!
I0111 23:47:53.235] Successful
I0111 23:47:53.235] message:error: unknown command "hello"
I0111 23:47:53.235] See 'kubectl plugin -h' for help and examples.
I0111 23:47:53.236] has:unknown command
I0111 23:47:53.316] Successful
I0111 23:47:53.316] message:error: exit status 1
I0111 23:47:53.316] has:error: exit status 1
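The plugin assertions above exercise kubectl's alpha plugin loader of this era, which discovered plugins from descriptor files rather than executables on PATH. As a hedged sketch only, a minimal plugin of that vintage looked roughly like a plugin.yaml under ~/.kube/plugins/<name>/ (field names per the alpha format; the exact layout may differ by version):

  mkdir -p ~/.kube/plugins/hello
  cat > ~/.kube/plugins/hello/plugin.yaml <<EOF
  name: "hello"
  shortDesc: "The hello plugin"
  command: "echo This plugin works!"
  EOF
  kubectl plugin hello   # expected to print: This plugin works!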
I0111 23:47:53.397] Successful
I0111 23:47:53.397] message:Plugin with a tree of commands
I0111 23:47:53.397] 
I0111 23:47:53.397] Available Commands:
I0111 23:47:53.398]   child1      The first child of a tree
I0111 23:47:53.398]   child2      The second child of a tree
... skipping 467 lines ...
I0111 23:47:53.844] 
I0111 23:47:53.846] +++ Running case: test-cmd.run_impersonation_tests 
I0111 23:47:53.849] +++ working dir: /go/src/k8s.io/kubernetes
I0111 23:47:53.851] +++ command: run_impersonation_tests
I0111 23:47:53.859] +++ [0111 23:47:53] Testing impersonation
I0111 23:47:53.938] Successful
I0111 23:47:53.938] message:error: requesting groups or user-extra for  without impersonating a user
I0111 23:47:53.938] has:without impersonating a user
I0111 23:47:54.097] certificatesigningrequest.certificates.k8s.io/foo created
I0111 23:47:54.200] test-cmd-util.sh:5101: Successful get csr/foo {{.spec.username}}: user1
I0111 23:47:54.296] test-cmd-util.sh:5102: Successful get csr/foo {{range .spec.groups}}{{.}}{{end}}: system:authenticated
I0111 23:47:54.383] certificatesigningrequest.certificates.k8s.io "foo" deleted
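The impersonation checks above pin down two behaviors: group impersonation without a user is rejected (hence the error with the empty username), and a CSR created with both flags carries the impersonated identity in spec.username and spec.groups. A sketch (csr.yaml is a hypothetical manifest standing in for the test fixture):

  # Rejected: groups can only be impersonated together with a user
  kubectl get pods --as-group=system:authenticated
  # error: requesting groups or user-extra for  without impersonating a user

  # Accepted: the resulting CSR records user1 / system:authenticated
  kubectl create -f csr.yaml --as=user1 --as-group=system:authenticated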
I0111 23:47:54.539] certificatesigningrequest.certificates.k8s.io/foo created
... skipping 195 lines ...
I0111 23:56:21.614] ok  	k8s.io/kubernetes/test/integration/replicationcontroller	57.546s
I0111 23:56:21.614] [restful] 2019/01/11 23:51:39 log.go:33: [restful/swagger] listing is available at https://172.17.0.2:39133/swaggerapi
I0111 23:56:21.614] [restful] 2019/01/11 23:51:39 log.go:33: [restful/swagger] https://172.17.0.2:39133/swaggerui/ is mapped to folder /swagger-ui/
I0111 23:56:21.614] [restful] 2019/01/11 23:51:41 log.go:33: [restful/swagger] listing is available at https://172.17.0.2:39133/swaggerapi
I0111 23:56:21.614] [restful] 2019/01/11 23:51:41 log.go:33: [restful/swagger] https://172.17.0.2:39133/swaggerui/ is mapped to folder /swagger-ui/
I0111 23:56:21.614] ok  	k8s.io/kubernetes/test/integration/scale	13.888s
I0111 23:56:21.615] FAIL	k8s.io/kubernetes/test/integration/scheduler	264.055s
I0111 23:56:21.615] ok  	k8s.io/kubernetes/test/integration/scheduler_perf	0.956s
I0111 23:56:21.615] ok  	k8s.io/kubernetes/test/integration/secrets	4.600s
I0111 23:56:21.615] ok  	k8s.io/kubernetes/test/integration/serviceaccount	45.840s
I0111 23:56:21.615] ok  	k8s.io/kubernetes/test/integration/statefulset	11.813s
I0111 23:56:21.615] ok  	k8s.io/kubernetes/test/integration/storageclasses	4.410s
I0111 23:56:21.615] [restful] 2019/01/11 23:52:59 log.go:33: [restful/swagger] listing is available at https://172.17.0.2:36605/swaggerapi
... skipping 2 lines ...
I0111 23:56:21.615] [restful] 2019/01/11 23:53:01 log.go:33: [restful/swagger] https://172.17.0.2:36605/swaggerui/ is mapped to folder /swagger-ui/
I0111 23:56:21.616] ok  	k8s.io/kubernetes/test/integration/tls	12.661s
I0111 23:56:21.616] ok  	k8s.io/kubernetes/test/integration/ttlcontroller	10.555s
I0111 23:56:21.616] ok  	k8s.io/kubernetes/test/integration/volume	88.148s
I0111 23:56:21.679] ok  	k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration	107.735s
I0111 23:56:22.235] +++ [0111 23:56:22] Saved JUnit XML test report to /workspace/artifacts/junit_cae8d27844a37937152775ec7fb068d1755ac188_20190111-234821.xml
I0111 23:56:22.238] Makefile:184: recipe for target 'test' failed
I0111 23:56:22.248] +++ [0111 23:56:22] Cleaning up etcd
W0111 23:56:22.349] make[1]: *** [test] Error 1
W0111 23:56:22.349] !!! [0111 23:56:22] Call tree:
W0111 23:56:22.349] !!! [0111 23:56:22]  1: hack/make-rules/test-integration.sh:105 runTests(...)
W0111 23:56:22.446] make: *** [test-integration] Error 1
I0111 23:56:22.547] +++ [0111 23:56:22] Integration test cleanup complete
I0111 23:56:22.547] Makefile:203: recipe for target 'test-integration' failed
W0111 23:56:23.643] Traceback (most recent call last):
W0111 23:56:23.643]   File "/workspace/./test-infra/jenkins/../scenarios/kubernetes_verify.py", line 178, in <module>
W0111 23:56:23.643]     ARGS.exclude_typecheck, ARGS.exclude_godep)
W0111 23:56:23.643]   File "/workspace/./test-infra/jenkins/../scenarios/kubernetes_verify.py", line 140, in main
W0111 23:56:23.643]     check(*cmd)
W0111 23:56:23.643]   File "/workspace/./test-infra/jenkins/../scenarios/kubernetes_verify.py", line 48, in check
W0111 23:56:23.643]     subprocess.check_call(cmd)
W0111 23:56:23.644]   File "/usr/lib/python2.7/subprocess.py", line 186, in check_call
W0111 23:56:23.644]     raise CalledProcessError(retcode, cmd)
W0111 23:56:23.644] subprocess.CalledProcessError: Command '('docker', 'run', '--rm=true', '--privileged=true', '-v', '/var/run/docker.sock:/var/run/docker.sock', '-v', '/etc/localtime:/etc/localtime:ro', '-v', '/workspace/k8s.io/kubernetes:/go/src/k8s.io/kubernetes', '-v', '/workspace/k8s.io/:/workspace/k8s.io/', '-v', '/workspace/_artifacts:/workspace/artifacts', '-e', 'KUBE_FORCE_VERIFY_CHECKS=n', '-e', 'KUBE_VERIFY_GIT_BRANCH=release-1.11', '-e', 'EXCLUDE_TYPECHECK=n', '-e', 'EXCLUDE_GODEP=n', '-e', 'REPO_DIR=/workspace/k8s.io/kubernetes', '--tmpfs', '/tmp:exec,mode=1777', 'gcr.io/k8s-testimages/kubekins-test:1.11-v20181218-db74ab3f4', 'bash', '-c', 'cd kubernetes && ./hack/jenkins/test-dockerized.sh')' returned non-zero exit status 2
E0111 23:56:23.649] Command failed
I0111 23:56:23.649] process 686 exited with code 1 after 20.8m
E0111 23:56:23.650] FAIL: pull-kubernetes-integration
I0111 23:56:23.650] Call:  gcloud auth activate-service-account --key-file=/etc/service-account/service-account.json
W0111 23:56:24.120] Activated service account credentials for: [pr-kubekins@kubernetes-jenkins-pull.iam.gserviceaccount.com]
I0111 23:56:24.179] process 123997 exited with code 0 after 0.0m
I0111 23:56:24.179] Call:  gcloud config get-value account
I0111 23:56:24.455] process 124009 exited with code 0 after 0.0m
I0111 23:56:24.455] Will upload results to gs://kubernetes-jenkins/pr-logs using pr-kubekins@kubernetes-jenkins-pull.iam.gserviceaccount.com
I0111 23:56:24.456] Upload result and artifacts...
I0111 23:56:24.456] Gubernator results at https://gubernator.k8s.io/build/kubernetes-jenkins/pr-logs/pull/72601/pull-kubernetes-integration/41088
I0111 23:56:24.456] Call:  gsutil ls gs://kubernetes-jenkins/pr-logs/pull/72601/pull-kubernetes-integration/41088/artifacts
W0111 23:56:25.505] CommandException: One or more URLs matched no objects.
E0111 23:56:25.634] Command failed
I0111 23:56:25.635] process 124021 exited with code 1 after 0.0m
W0111 23:56:25.635] Remote dir gs://kubernetes-jenkins/pr-logs/pull/72601/pull-kubernetes-integration/41088/artifacts not exist yet
I0111 23:56:25.635] Call:  gsutil -m -q -o GSUtil:use_magicfile=True cp -r -c -z log,txt,xml /workspace/_artifacts gs://kubernetes-jenkins/pr-logs/pull/72601/pull-kubernetes-integration/41088/artifacts
I0111 23:56:28.549] process 124163 exited with code 0 after 0.0m
W0111 23:56:28.549] metadata path /workspace/_artifacts/metadata.json does not exist
W0111 23:56:28.550] metadata not found or invalid, init with empty metadata
... skipping 23 lines ...