go test -v k8s.io/kubernetes/test/integration/scheduler -run TestNodePIDPressure$
I1206 13:20:49.375375 121717 services.go:33] Network range for service cluster IPs is unspecified. Defaulting to {10.0.0.0 ffffff00}.
I1206 13:20:49.375415 121717 master.go:272] Node port range unspecified. Defaulting to 30000-32767.
I1206 13:20:49.375427 121717 master.go:228] Using reconciler:
I1206 13:20:49.377437 121717 clientconn.go:551] parsed scheme: ""
I1206 13:20:49.377470 121717 clientconn.go:557] scheme "" not registered, fallback to default scheme
I1206 13:20:49.377508 121717 resolver_conn_wrapper.go:116] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0 <nil>}]
I1206 13:20:49.377568 121717 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
I1206 13:20:49.377916 121717 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
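The five-line block above is grpc-go dialing the test etcd at 127.0.0.1:2379; the apiserver under test opens one such client connection per storage client, so this same block recurs throughout startup. The `scheme "" not registered` line only means the dial target has no URI scheme, so grpc-go falls back to its default (passthrough) resolver. A minimal sketch that exercises the same path, assuming a 2018-era google.golang.org/grpc and a local etcd:

package main

import (
    "context"
    "log"
    "time"

    "google.golang.org/grpc"
)

func main() {
    // A scheme-less target such as "127.0.0.1:2379" is what produces
    // `parsed scheme: ""` and the fallback-to-default-scheme message:
    // no resolver is registered for the empty scheme, so grpc-go uses
    // the passthrough resolver and dials the address directly.
    ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
    defer cancel()
    conn, err := grpc.DialContext(ctx, "127.0.0.1:2379",
        grpc.WithInsecure(), // the test etcd listens without TLS
        grpc.WithBlock(),    // wait until the connection is ready
    )
    if err != nil {
        log.Fatalf("dial etcd: %v", err)
    }
    defer conn.Close()
    log.Printf("connection state: %v", conn.GetState())
}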
W1206 13:20:49.528191 121717 genericapiserver.go:334] Skipping API batch/v2alpha1 because it has no resources.
W1206 13:20:49.544725 121717 genericapiserver.go:334] Skipping API rbac.authorization.k8s.io/v1alpha1 because it has no resources.
W1206 13:20:49.545454 121717 genericapiserver.go:334] Skipping API scheduling.k8s.io/v1alpha1 because it has no resources.
W1206 13:20:49.547962 121717 genericapiserver.go:334] Skipping API storage.k8s.io/v1alpha1 because it has no resources.
W1206 13:20:49.563249 121717 genericapiserver.go:334] Skipping API admissionregistration.k8s.io/v1alpha1 because it has no resources.
I1206 13:20:50.570596 121717 storage_scheduling.go:91] created PriorityClass system-node-critical with value 2000001000
I1206 13:20:50.573606 121717 storage_scheduling.go:91] created PriorityClass system-cluster-critical with value 2000000000
I1206 13:20:50.573626 121717 storage_scheduling.go:100] all system priority classes are created successfully or already exist.
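The storage_scheduling.go lines record the apiserver bootstrapping the built-in priority classes (system-node-critical = 2000001000, system-cluster-critical = 2000000000). A rough sketch of that ensure-style create, assuming the scheduling.k8s.io/v1beta1 client of this release; ensureSystemPriorityClass is a hypothetical helper, not the actual function in storage_scheduling.go:

package bootstrap

import (
    schedulingv1beta1 "k8s.io/api/scheduling/v1beta1"
    apierrors "k8s.io/apimachinery/pkg/api/errors"
    metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    "k8s.io/client-go/kubernetes"
)

// ensureSystemPriorityClass creates a priority class and treats
// "already exists" as success, matching the log's "created ... or
// already exist" wording. (Hypothetical helper for illustration.)
func ensureSystemPriorityClass(cs kubernetes.Interface, name string, value int32) error {
    pc := &schedulingv1beta1.PriorityClass{
        ObjectMeta: metav1.ObjectMeta{Name: name},
        Value:      value,
    }
    if _, err := cs.SchedulingV1beta1().PriorityClasses().Create(pc); err != nil && !apierrors.IsAlreadyExists(err) {
        return err
    }
    return nil
}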
I1206 13:20:50.591330 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/cluster-admin
I1206 13:20:50.594397 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:discovery
I1206 13:20:50.597200 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:basic-user
I1206 13:20:50.600756 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/admin
I1206 13:20:50.603524 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/edit
I1206 13:20:50.606251 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/view
I1206 13:20:50.609398 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:aggregate-to-admin
I1206 13:20:50.612754 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:aggregate-to-edit
I1206 13:20:50.618766 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:aggregate-to-view
I1206 13:20:50.622218 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:heapster
I1206 13:20:50.626166 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:node
I1206 13:20:50.629266 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:node-problem-detector
I1206 13:20:50.632606 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:node-proxier
I1206 13:20:50.636000 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:kubelet-api-admin
I1206 13:20:50.638859 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:node-bootstrapper
I1206 13:20:50.643715 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:auth-delegator
I1206 13:20:50.648869 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:kube-aggregator
I1206 13:20:50.652162 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:kube-controller-manager
I1206 13:20:50.655140 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:kube-scheduler
I1206 13:20:50.658448 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:kube-dns
I1206 13:20:50.661561 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:persistent-volume-provisioner
I1206 13:20:50.664826 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:csi-external-attacher
I1206 13:20:50.669024 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:aws-cloud-provider
I1206 13:20:50.672003 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:certificates.k8s.io:certificatesigningrequests:nodeclient
I1206 13:20:50.674784 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:certificates.k8s.io:certificatesigningrequests:selfnodeclient
I1206 13:20:50.677880 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:volume-scheduler
I1206 13:20:50.680688 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:csi-external-provisioner
I1206 13:20:50.685073 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:attachdetach-controller
I1206 13:20:50.689053 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:clusterrole-aggregation-controller
I1206 13:20:50.692219 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:cronjob-controller
I1206 13:20:50.699165 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:daemon-set-controller
I1206 13:20:50.701784 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:deployment-controller
I1206 13:20:50.704543 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:disruption-controller
I1206 13:20:50.707017 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:endpoint-controller
I1206 13:20:50.710191 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:expand-controller
I1206 13:20:50.712601 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:generic-garbage-collector
I1206 13:20:50.715866 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:horizontal-pod-autoscaler
I1206 13:20:50.719180 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:job-controller
I1206 13:20:50.722233 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:namespace-controller
I1206 13:20:50.731275 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:node-controller
I1206 13:20:50.734518 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:persistent-volume-binder
I1206 13:20:50.737320 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:pod-garbage-collector
I1206 13:20:50.740344 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:replicaset-controller
I1206 13:20:50.743194 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:replication-controller
I1206 13:20:50.745828 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:resourcequota-controller
I1206 13:20:50.748672 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:route-controller
I1206 13:20:50.751204 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:service-account-controller
I1206 13:20:50.754068 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:service-controller
I1206 13:20:50.757375 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:statefulset-controller
I1206 13:20:50.760173 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:ttl-controller
I1206 13:20:50.771615 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:certificate-controller
I1206 13:20:50.809352 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:pvc-protection-controller
I1206 13:20:50.849296 121717 storage_rbac.go:187] created clusterrole.rbac.authorization.k8s.io/system:controller:pv-protection-controller
I1206 13:20:50.889594 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/cluster-admin
I1206 13:20:50.929621 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:discovery
I1206 13:20:50.969855 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:basic-user
I1206 13:20:51.009574 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:node-proxier
I1206 13:20:51.049452 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:kube-controller-manager
I1206 13:20:51.089203 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:kube-dns
I1206 13:20:51.129320 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:kube-scheduler
I1206 13:20:51.172293 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:aws-cloud-provider
I1206 13:20:51.210534 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:node
I1206 13:20:51.249307 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:volume-scheduler
I1206 13:20:51.289248 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:attachdetach-controller
I1206 13:20:51.329620 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:clusterrole-aggregation-controller
I1206 13:20:51.369731 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:cronjob-controller
I1206 13:20:51.410550 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:daemon-set-controller
I1206 13:20:51.466021 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:deployment-controller
I1206 13:20:51.489683 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:disruption-controller
I1206 13:20:51.529661 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:endpoint-controller
I1206 13:20:51.569887 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:expand-controller
I1206 13:20:51.609213 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:generic-garbage-collector
I1206 13:20:51.649206 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:horizontal-pod-autoscaler
I1206 13:20:51.699137 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:job-controller
I1206 13:20:51.735606 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:namespace-controller
I1206 13:20:51.770193 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:node-controller
I1206 13:20:51.809620 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:persistent-volume-binder
I1206 13:20:51.849197 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:pod-garbage-collector
I1206 13:20:51.889476 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:replicaset-controller
I1206 13:20:51.929444 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:replication-controller
I1206 13:20:51.968983 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:resourcequota-controller
I1206 13:20:52.010544 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:route-controller
I1206 13:20:52.049247 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:service-account-controller
I1206 13:20:52.089459 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:service-controller
I1206 13:20:52.129095 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:statefulset-controller
I1206 13:20:52.169364 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:ttl-controller
I1206 13:20:52.209655 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:certificate-controller
I1206 13:20:52.249068 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:pvc-protection-controller
I1206 13:20:52.289752 121717 storage_rbac.go:215] created clusterrolebinding.rbac.authorization.k8s.io/system:controller:pv-protection-controller
I1206 13:20:52.329572 121717 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-public
I1206 13:20:52.373961 121717 storage_rbac.go:246] created role.rbac.authorization.k8s.io/extension-apiserver-authentication-reader in kube-system
I1206 13:20:52.418843 121717 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-system
I1206 13:20:52.452531 121717 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system:controller:cloud-provider in kube-system
I1206 13:20:52.489697 121717 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system:controller:token-cleaner in kube-system
I1206 13:20:52.529358 121717 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system::leader-locking-kube-controller-manager in kube-system
I1206 13:20:52.569850 121717 storage_rbac.go:246] created role.rbac.authorization.k8s.io/system::leader-locking-kube-scheduler in kube-system
I1206 13:20:52.609477 121717 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system::leader-locking-kube-controller-manager in kube-system
I1206 13:20:52.649489 121717 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system::leader-locking-kube-scheduler in kube-system
I1206 13:20:52.689205 121717 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-system
I1206 13:20:52.729507 121717 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system:controller:cloud-provider in kube-system
I1206 13:20:52.770091 121717 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system:controller:token-cleaner in kube-system
I1206 13:20:52.809785 121717 storage_rbac.go:276] created rolebinding.rbac.authorization.k8s.io/system:controller:bootstrap-signer in kube-public
W1206 13:20:52.869209 121717 mutation_detector.go:48] Mutation detector is enabled, this will result in memory leakage.
I1206 13:20:52.869619 121717 controller_utils.go:1027] Waiting for caches to sync for scheduler controller
I1206 13:20:52.969827 121717 controller_utils.go:1034] Caches are synced for scheduler controller
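The controller_utils.go pair above is the standard informer warm-up gate: the scheduler blocks until its shared-informer caches have synced before it starts scheduling. A minimal sketch of that pattern with client-go (the function name here is illustrative, not the scheduler's actual code):

package main

import (
    "fmt"

    "k8s.io/client-go/tools/cache"
)

// waitForScheduler blocks until every informer reports HasSynced,
// mirroring "Waiting for caches to sync" / "Caches are synced".
func waitForScheduler(stopCh <-chan struct{}, synced ...cache.InformerSynced) error {
    if !cache.WaitForCacheSync(stopCh, synced...) {
        return fmt.Errorf("timed out waiting for scheduler caches to sync")
    }
    return nil
}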
E1206 13:21:24.991846 121717 factory.go:1352] Error while retrieving next pod from scheduling queue: scheduling queue is closed
I1206 13:21:24.996432 121717 controller.go:170] Shutting down kubernetes service endpoint reconciler
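The factory.go error at 13:21:24 is the scheduler's shutdown path (the scheduling queue is closed during teardown), not the assertion failure itself; the notable signal is the roughly 30-second gap between cache sync and teardown, consistent with the test waiting out a scheduling condition. For orientation, TestNodePIDPressure exercises scheduling against a node whose PIDPressure condition is True. A minimal sketch of that setup with the client-go of this release; createPIDPressureNode is a hypothetical helper, not the test's actual code:

package schedtest

import (
    v1 "k8s.io/api/core/v1"
    metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    "k8s.io/client-go/kubernetes"
)

// createPIDPressureNode registers a node whose PIDPressure condition
// is True, the setup a PID-pressure scheduling test needs before it
// can observe the scheduler's behavior toward that node. (Node
// objects, unlike pods, allow status to be set on create.)
func createPIDPressureNode(cs kubernetes.Interface) (*v1.Node, error) {
    node := &v1.Node{
        ObjectMeta: metav1.ObjectMeta{Name: "pidpressure-node"},
        Status: v1.NodeStatus{
            Conditions: []v1.NodeCondition{
                {Type: v1.NodePIDPressure, Status: v1.ConditionTrue},
            },
        },
    }
    return cs.CoreV1().Nodes().Create(node)
}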
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdCompletion
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitAPIPort
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitAPIPort/accept_a_valid_port_number
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitAPIPort/fail_on_negative_port_number
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitAPIPort/fail_on_non-string_port
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitAPIPort/fail_on_too_large_port_number
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitCertPhaseCSR
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitCertPhaseCSR/fails_on_CSR
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitCertPhaseCSR/fails_on_all
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitCertPhaseCSR/generate_CSR
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig/can't_load_v1alpha1_config
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig/can't_load_v1alpha2_config
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig/can_load_v1alpha3_config
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig/can_load_v1beta1_config
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig/don't_allow_mixed_arguments_v1alpha3
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig/don't_allow_mixed_arguments_v1beta1
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitConfig/fail_on_non_existing_path
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitKubernetesVersion
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitKubernetesVersion/invalid_semantic_version_string_is_detected
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitKubernetesVersion/valid_version_is_accepted
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitToken
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitToken/invalid_token_non-lowercase
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitToken/invalid_token_size
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdInitToken/valid_token_is_accepted
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinArgsMixed
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinBadArgs
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinConfig
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinDiscoveryFile
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinDiscoveryToken
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinNodeName
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinTLSBootstrapToken
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdJoinToken
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdTokenDelete
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdTokenGenerate
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdTokenGenerateTypoError
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdVersion
k8s.io/kubernetes/cmd/kubeadm/test/cmd TestCmdVersionOutputJsonOrYaml
k8s.io/kubernetes/test/integration/apimachinery TestWatchRestartsIfTimeoutNotReached
k8s.io/kubernetes/test/integration/apimachinery TestWatchRestartsIfTimeoutNotReached/InformerWatcher_survives_closed_watches
k8s.io/kubernetes/test/integration/apimachinery TestWatchRestartsIfTimeoutNotReached/regular_watcher_should_fail
k8s.io/kubernetes/test/integration/apiserver Test202StatusCode
k8s.io/kubernetes/test/integration/apiserver TestAPIListChunking
k8s.io/kubernetes/test/integration/apiserver TestNameInFieldSelector
k8s.io/kubernetes/test/integration/apiserver TestPatchConflicts
k8s.io/kubernetes/test/integration/apiserver TestServerSidePrint
k8s.io/kubernetes/test/integration/auth TestAliceNotForbiddenOrUnauthorized
k8s.io/kubernetes/test/integration/auth TestAuthModeAlwaysAllow
k8s.io/kubernetes/test/integration/auth TestAuthModeAlwaysDeny
k8s.io/kubernetes/test/integration/auth TestAuthorizationAttributeDetermination
k8s.io/kubernetes/test/integration/auth TestBobIsForbidden
k8s.io/kubernetes/test/integration/auth TestBootstrapTokenAuth
k8s.io/kubernetes/test/integration/auth TestBootstrapping
k8s.io/kubernetes/test/integration/auth TestImpersonateIsForbidden
k8s.io/kubernetes/test/integration/auth TestKindAuthorization
k8s.io/kubernetes/test/integration/auth TestLocalSubjectAccessReview
k8s.io/kubernetes/test/integration/auth TestNamespaceAuthorization
k8s.io/kubernetes/test/integration/auth TestNodeAuthorizer
k8s.io/kubernetes/test/integration/auth TestRBAC
k8s.io/kubernetes/test/integration/auth TestReadOnlyAuthorization
k8s.io/kubernetes/test/integration/auth TestSelfSubjectAccessReview
k8s.io/kubernetes/test/integration/auth TestServiceAccountTokenCreate
k8s.io/kubernetes/test/integration/auth TestServiceAccountTokenCreate/a_token_request_with_out-of-range_expiration
k8s.io/kubernetes/test/integration/auth TestServiceAccountTokenCreate/a_token_request_within_expiration_time
k8s.io/kubernetes/test/integration/auth TestServiceAccountTokenCreate/a_token_should_be_invalid_after_recreating_same_name_pod
k8s.io/kubernetes/test/integration/auth TestServiceAccountTokenCreate/a_token_should_be_invalid_after_recreating_same_name_secret
k8s.io/kubernetes/test/integration/auth TestServiceAccountTokenCreate/a_token_without_an_api_audience_is_invalid
k8s.io/kubernetes/test/integration/auth TestServiceAccountTokenCreate/a_tokenrequest_without_an_audience_is_valid_against_the_api
k8s.io/kubernetes/test/integration/auth TestServiceAccountTokenCreate/bound_to_service_account
k8s.io/kubernetes/test/integration/auth TestServiceAccountTokenCreate/bound_to_service_account_and_pod
k8s.io/kubernetes/test/integration/auth TestServiceAccountTokenCreate/bound_to_service_account_and_pod_running_as_different_service_account
k8s.io/kubernetes/test/integration/auth TestServiceAccountTokenCreate/bound_to_service_account_and_secret
k8s.io/kubernetes/test/integration/auth TestServiceAccountTokenCreate/expired_token
k8s.io/kubernetes/test/integration/auth TestSubjectAccessReview
k8s.io/kubernetes/test/integration/auth TestUnknownUserIsUnauthorized
k8s.io/kubernetes/test/integration/auth TestWebhookTokenAuthenticator
k8s.io/kubernetes/test/integration/client TestAPIVersions
k8s.io/kubernetes/test/integration/client TestAtomicPut
k8s.io/kubernetes/test/integration/client TestClient
k8s.io/kubernetes/test/integration/client TestDynamicClient
k8s.io/kubernetes/test/integration/client TestDynamicClientWatch
k8s.io/kubernetes/test/integration/client TestPatch
k8s.io/kubernetes/test/integration/client TestPatchWithCreateOnUpdate
k8s.io/kubernetes/test/integration/client TestSelfLinkOnNamespace
k8s.io/kubernetes/test/integration/client TestSingleWatch
k8s.io/kubernetes/test/integration/configmap TestConfigMap
k8s.io/kubernetes/test/integration/cronjob TestCronJobLaunchesPodAndCleansUp
k8s.io/kubernetes/test/integration/daemonset TestDaemonSetWithNodeSelectorLaunchesPods
k8s.io/kubernetes/test/integration/daemonset TestDaemonSetWithNodeSelectorLaunchesPods/ScheduleDaemonSetPods_(false)
k8s.io/kubernetes/test/integration/daemonset TestDaemonSetWithNodeSelectorLaunchesPods/ScheduleDaemonSetPods_(false)/TestDaemonSetWithNodeSelectorLaunchesPods/ScheduleDaemonSetPods_(false)_(&DaemonSetUpdateStrategy{Type:OnDelete,RollingUpdate:nil,})
k8s.io/kubernetes/test/integration/daemonset TestDaemonSetWithNodeSelectorLaunchesPods/ScheduleDaemonSetPods_(false)/TestDaemonSetWithNodeSelectorLaunchesPods/ScheduleDaemonSetPods_(false)_(&DaemonSetUpdateStrategy{Type:RollingUpdate,RollingUpdate:&RollingUpdateDaemonSet{MaxUnavailable:1,},})
k8s.io/kubernetes/test/integration/daemonset TestDaemonSetWithNodeSelectorLaunchesPods/ScheduleDaemonSetPods_(true)
k8s.io/kubernetes/test/integration/daemonset TestDaemonSetWithNodeSelectorLaunchesPods/ScheduleDaemonSetPods_(true)/TestDaemonSetWithNodeSelectorLaunchesPods/ScheduleDaemonSetPods_(true)_(&DaemonSetUpdateStrategy{Type:OnDelete,RollingUpdate:nil,})
k8s.io/kubernetes/test/integration/daemonset TestDaemonSetWithNodeSelectorLaunchesPods/ScheduleDaemonSetPods_(true)/TestDaemonSetWithNodeSelectorLaunchesPods/ScheduleDaemonSetPods_(true)_(&DaemonSetUpdateStrategy{Type:RollingUpdate,RollingUpdate:&RollingUpdateDaemonSet{MaxUnavailable:1,},})
k8s.io/kubernetes/test/integration/daemonset TestInsufficientCapacityNodeDaemonDoesNotLaunchPod
k8s.io/kubernetes/test/integration/daemonset TestInsufficientCapacityNodeDaemonDoesNotLaunchPod/TestInsufficientCapacityNodeDaemonDoesNotLaunchPod_(&DaemonSetUpdateStrategy{Type:OnDelete,RollingUpdate:nil,})
k8s.io/kubernetes/test/integration/daemonset TestInsufficientCapacityNodeDaemonDoesNotLaunchPod/TestInsufficientCapacityNodeDaemonDoesNotLaunchPod_(&DaemonSetUpdateStrategy{Type:RollingUpdate,RollingUpdate:&RollingUpdateDaemonSet{MaxUnavailable:1,},})
k8s.io/kubernetes/test/integration/daemonset TestInsufficientCapacityNodeWhenScheduleDaemonSetPodsEnabled
k8s.io/kubernetes/test/integration/daemonset TestInsufficientCapacityNodeWhenScheduleDaemonSetPodsEnabled/TestInsufficientCapacityNodeWhenScheduleDaemonSetPodsEnabled_(&DaemonSetUpdateStrategy{Type:OnDelete,RollingUpdate:nil,})
k8s.io/kubernetes/test/integration/daemonset TestInsufficientCapacityNodeWhenScheduleDaemonSetPodsEnabled/TestInsufficientCapacityNodeWhenScheduleDaemonSetPodsEnabled_(&DaemonSetUpdateStrategy{Type:RollingUpdate,RollingUpdate:&RollingUpdateDaemonSet{MaxUnavailable:1,},})
k8s.io/kubernetes/test/integration/daemonset TestLaunchWithHashCollision
k8s.io/kubernetes/test/integration/daemonset TestNotReadyNodeDaemonDoesLaunchPod
k8s.io/kubernetes/test/integration/daemonset TestNotReadyNodeDaemonDoesLaunchPod/TestNotReadyNodeDaemonDoesLaunchPod_(&DaemonSetUpdateStrategy{Type:OnDelete,RollingUpdate:nil,})
k8s.io/kubernetes/test/integration/daemonset TestNotReadyNodeDaemonDoesLaunchPod/TestNotReadyNodeDaemonDoesLaunchPod_(&DaemonSetUpdateStrategy{Type:RollingUpdate,RollingUpdate:&RollingUpdateDaemonSet{MaxUnavailable:1,},})
k8s.io/kubernetes/test/integration/daemonset TestOneNodeDaemonLaunchesPod
k8s.io/kubernetes/test/integration/daemonset TestOneNodeDaemonLaunchesPod/ScheduleDaemonSetPods_(false)
k8s.io/kubernetes/test/integration/daemonset TestOneNodeDaemonLaunchesPod/ScheduleDaemonSetPods_(false)/TestOneNodeDaemonLaunchesPod/ScheduleDaemonSetPods_(false)_(&DaemonSetUpdateStrategy{Type:OnDelete,RollingUpdate:nil,})
k8s.io/kubernetes/test/integration/daemonset TestOneNodeDaemonLaunchesPod/ScheduleDaemonSetPods_(false)/TestOneNodeDaemonLaunchesPod/ScheduleDaemonSetPods_(false)_(&DaemonSetUpdateStrategy{Type:RollingUpdate,RollingUpdate:&RollingUpdateDaemonSet{MaxUnavailable:1,},})
k8s.io/kubernetes/test/integration/daemonset TestOneNodeDaemonLaunchesPod/ScheduleDaemonSetPods_(true)
k8s.io/kubernetes/test/integration/daemonset TestOneNodeDaemonLaunchesPod/ScheduleDaemonSetPods_(true)/TestOneNodeDaemonLaunchesPod/ScheduleDaemonSetPods_(true)_(&DaemonSetUpdateStrategy{Type:OnDelete,RollingUpdate:nil,})
k8s.io/kubernetes/test/integration/daemonset TestOneNodeDaemonLaunchesPod/ScheduleDaemonSetPods_(true)/TestOneNodeDaemonLaunchesPod/ScheduleDaemonSetPods_(true)_(&DaemonSetUpdateStrategy{Type:RollingUpdate,RollingUpdate:&RollingUpdateDaemonSet{MaxUnavailable:1,},})
k8s.io/kubernetes/test/integration/daemonset TestSimpleDaemonSetLaunchesPods
k8s.io/kubernetes/test/integration/daemonset TestSimpleDaemonSetLaunchesPods/ScheduleDaemonSetPods_(false)
k8s.io/kubernetes/test/integration/daemonset TestSimpleDaemonSetLaunchesPods/ScheduleDaemonSetPods_(false)/TestSimpleDaemonSetLaunchesPods/ScheduleDaemonSetPods_(false)_(&DaemonSetUpdateStrategy{Type:OnDelete,RollingUpdate:nil,})
k8s.io/kubernetes/test/integration/daemonset TestSimpleDaemonSetLaunchesPods/ScheduleDaemonSetPods_(false)/TestSimpleDaemonSetLaunchesPods/ScheduleDaemonSetPods_(false)_(&DaemonSetUpdateStrategy{Type:RollingUpdate,RollingUpdate:&RollingUpdateDaemonSet{MaxUnavailable:1,},})
k8s.io/kubernetes/test/integration/daemonset TestSimpleDaemonSetLaunchesPods/ScheduleDaemonSetPods_(true)
k8s.io/kubernetes/test/integration/daemonset TestSimpleDaemonSetLaunchesPods/ScheduleDaemonSetPods_(true)/TestSimpleDaemonSetLaunchesPods/ScheduleDaemonSetPods_(true)_(&DaemonSetUpdateStrategy{Type:OnDelete,RollingUpdate:nil,})
k8s.io/kubernetes/test/integration/daemonset TestSimpleDaemonSetLaunchesPods/ScheduleDaemonSetPods_(true)/TestSimpleDaemonSetLaunchesPods/ScheduleDaemonSetPods_(true)_(&DaemonSetUpdateStrategy{Type:RollingUpdate,RollingUpdate:&RollingUpdateDaemonSet{MaxUnavailable:1,},})
k8s.io/kubernetes/test/integration/daemonset TestTaintedNode
k8s.io/kubernetes/test/integration/daemonset TestTaintedNode/ScheduleDaemonSetPods_(false)
k8s.io/kubernetes/test/integration/daemonset TestTaintedNode/ScheduleDaemonSetPods_(false)/TestTaintedNode/ScheduleDaemonSetPods_(false)_(&DaemonSetUpdateStrategy{Type:OnDelete,RollingUpdate:nil,})
k8s.io/kubernetes/test/integration/daemonset TestTaintedNode/ScheduleDaemonSetPods_(false)/TestTaintedNode/ScheduleDaemonSetPods_(false)_(&DaemonSetUpdateStrategy{Type:RollingUpdate,RollingUpdate:&RollingUpdateDaemonSet{MaxUnavailable:1,},})
k8s.io/kubernetes/test/integration/daemonset TestTaintedNode/ScheduleDaemonSetPods_(true)
k8s.io/kubernetes/test/integration/daemonset TestTaintedNode/ScheduleDaemonSetPods_(true)/TestTaintedNode/ScheduleDaemonSetPods_(true)_(&DaemonSetUpdateStrategy{Type:OnDelete,RollingUpdate:nil,})
k8s.io/kubernetes/test/integration/daemonset TestTaintedNode/ScheduleDaemonSetPods_(true)/TestTaintedNode/ScheduleDaemonSetPods_(true)_(&DaemonSetUpdateStrategy{Type:RollingUpdate,RollingUpdate:&RollingUpdateDaemonSet{MaxUnavailable:1,},})
k8s.io/kubernetes/test/integration/daemonset TestUnschedulableNodeDaemonDoesLaunchPod
k8s.io/kubernetes/test/integration/daemonset TestUnschedulableNodeDaemonDoesLaunchPod/ScheduleDaemonSetPods_(false)
k8s.io/kubernetes/test/integration/daemonset TestUnschedulableNodeDaemonDoesLaunchPod/ScheduleDaemonSetPods_(false)/TestUnschedulableNodeDaemonDoesLaunchPod/ScheduleDaemonSetPods_(false)_(&DaemonSetUpdateStrategy{Type:OnDelete,RollingUpdate:nil,})
k8s.io/kubernetes/test/integration/daemonset TestUnschedulableNodeDaemonDoesLaunchPod/ScheduleDaemonSetPods_(false)/TestUnschedulableNodeDaemonDoesLaunchPod/ScheduleDaemonSetPods_(false)_(&DaemonSetUpdateStrategy{Type:RollingUpdate,RollingUpdate:&RollingUpdateDaemonSet{MaxUnavailable:1,},})
k8s.io/kubernetes/test/integration/daemonset TestUnschedulableNodeDaemonDoesLaunchPod/ScheduleDaemonSetPods_(true)
k8s.io/kubernetes/test/integration/daemonset TestUnschedulableNodeDaemonDoesLaunchPod/ScheduleDaemonSetPods_(true)/TestUnschedulableNodeDaemonDoesLaunchPod/ScheduleDaemonSetPods_(true)_(&DaemonSetUpdateStrategy{Type:OnDelete,RollingUpdate:nil,})
k8s.io/kubernetes/test/integration/daemonset TestUnschedulableNodeDaemonDoesLaunchPod/ScheduleDaemonSetPods_(true)/TestUnschedulableNodeDaemonDoesLaunchPod/ScheduleDaemonSetPods_(true)_(&DaemonSetUpdateStrategy{Type:RollingUpdate,RollingUpdate:&RollingUpdateDaemonSet{MaxUnavailable:1,},})
k8s.io/kubernetes/test/integration/defaulttolerationseconds TestAdmission
k8s.io/kubernetes/test/integration/deployment TestDeploymentAvailableCondition
k8s.io/kubernetes/test/integration/deployment TestDeploymentHashCollision
k8s.io/kubernetes/test/integration/deployment TestDeploymentRollingUpdate
k8s.io/kubernetes/test/integration/deployment TestDeploymentScaleSubresource
k8s.io/kubernetes/test/integration/deployment TestDeploymentSelectorImmutability
k8s.io/kubernetes/test/integration/deployment TestFailedDeployment
k8s.io/kubernetes/test/integration/deployment TestGeneralReplicaSetAdoption
k8s.io/kubernetes/test/integration/deployment TestNewDeployment
k8s.io/kubernetes/test/integration/deployment TestOverlappingDeployments
k8s.io/kubernetes/test/integration/deployment TestPausedDeployment
k8s.io/kubernetes/test/integration/deployment TestReplicaSetOrphaningAndAdoptionWhenLabelsChange
k8s.io/kubernetes/test/integration/deployment TestRollbackDeploymentRSNoRevision
k8s.io/kubernetes/test/integration/deployment TestScalePausedDeployment
k8s.io/kubernetes/test/integration/deployment TestScaledRolloutDeployment
k8s.io/kubernetes/test/integration/deployment TestSpecReplicasChange
k8s.io/kubernetes/test/integration/dryrun TestDryRun
k8s.io/kubernetes/test/integration/dryrun TestDryRun//v1,_Resource=configmaps
k8s.io/kubernetes/test/integration/dryrun TestDryRun//v1,_Resource=endpoints
k8s.io/kubernetes/test/integration/dryrun TestDryRun//v1,_Resource=events
k8s.io/kubernetes/test/integration/dryrun TestDryRun//v1,_Resource=limitranges
k8s.io/kubernetes/test/integration/dryrun TestDryRun//v1,_Resource=namespaces
k8s.io/kubernetes/test/integration/dryrun TestDryRun//v1,_Resource=nodes
k8s.io/kubernetes/test/integration/dryrun TestDryRun//v1,_Resource=persistentvolumeclaims
k8s.io/kubernetes/test/integration/dryrun TestDryRun//v1,_Resource=persistentvolumes
k8s.io/kubernetes/test/integration/dryrun TestDryRun//v1,_Resource=pods
k8s.io/kubernetes/test/integration/dryrun TestDryRun//v1,_Resource=podtemplates
k8s.io/kubernetes/test/integration/dryrun TestDryRun//v1,_Resource=replicationcontrollers
k8s.io/kubernetes/test/integration/dryrun TestDryRun//v1,_Resource=resourcequotas
k8s.io/kubernetes/test/integration/dryrun TestDryRun//v1,_Resource=secrets
k8s.io/kubernetes/test/integration/dryrun TestDryRun//v1,_Resource=serviceaccounts
k8s.io/kubernetes/test/integration/dryrun TestDryRun//v1,_Resource=services
k8s.io/kubernetes/test/integration/dryrun TestDryRun/admissionregistration.k8s.io/v1alpha1,_Resource=initializerconfigurations
k8s.io/kubernetes/test/integration/dryrun TestDryRun/admissionregistration.k8s.io/v1beta1,_Resource=mutatingwebhookconfigurations
k8s.io/kubernetes/test/integration/dryrun TestDryRun/admissionregistration.k8s.io/v1beta1,_Resource=validatingwebhookconfigurations
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apiextensions.k8s.io/v1beta1,_Resource=customresourcedefinitions
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apiregistration.k8s.io/v1,_Resource=apiservices
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apiregistration.k8s.io/v1beta1,_Resource=apiservices
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apps/v1,_Resource=controllerrevisions
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apps/v1,_Resource=daemonsets
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apps/v1,_Resource=deployments
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apps/v1,_Resource=replicasets
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apps/v1,_Resource=statefulsets
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apps/v1beta1,_Resource=controllerrevisions
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apps/v1beta1,_Resource=deployments
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apps/v1beta1,_Resource=statefulsets
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apps/v1beta2,_Resource=controllerrevisions
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apps/v1beta2,_Resource=daemonsets
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apps/v1beta2,_Resource=deployments
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apps/v1beta2,_Resource=replicasets
k8s.io/kubernetes/test/integration/dryrun TestDryRun/apps/v1beta2,_Resource=statefulsets
k8s.io/kubernetes/test/integration/dryrun TestDryRun/auditregistration.k8s.io/v1alpha1,_Resource=auditsinks
k8s.io/kubernetes/test/integration/dryrun TestDryRun/autoscaling/v1,_Resource=horizontalpodautoscalers
k8s.io/kubernetes/test/integration/dryrun TestDryRun/autoscaling/v2beta1,_Resource=horizontalpodautoscalers
k8s.io/kubernetes/test/integration/dryrun TestDryRun/autoscaling/v2beta2,_Resource=horizontalpodautoscalers
k8s.io/kubernetes/test/integration/dryrun TestDryRun/awesome.bears.com/v1,_Resource=pandas
k8s.io/kubernetes/test/integration/dryrun TestDryRun/awesome.bears.com/v3,_Resource=pandas
k8s.io/kubernetes/test/integration/dryrun TestDryRun/batch/v1,_Resource=jobs
k8s.io/kubernetes/test/integration/dryrun TestDryRun/batch/v1beta1,_Resource=cronjobs
k8s.io/kubernetes/test/integration/dryrun TestDryRun/batch/v2alpha1,_Resource=cronjobs
k8s.io/kubernetes/test/integration/dryrun TestDryRun/certificates.k8s.io/v1beta1,_Resource=certificatesigningrequests
k8s.io/kubernetes/test/integration/dryrun TestDryRun/coordination.k8s.io/v1beta1,_Resource=leases
k8s.io/kubernetes/test/integration/dryrun TestDryRun/cr.bar.com/v1,_Resource=foos
k8s.io/kubernetes/test/integration/dryrun TestDryRun/custom.fancy.com/v2,_Resource=pants
k8s.io/kubernetes/test/integration/dryrun TestDryRun/events.k8s.io/v1beta1,_Resource=events
k8s.io/kubernetes/test/integration/dryrun TestDryRun/extensions/v1beta1,_Resource=daemonsets
k8s.io/kubernetes/test/integration/dryrun TestDryRun/extensions/v1beta1,_Resource=deployments
k8s.io/kubernetes/test/integration/dryrun TestDryRun/extensions/v1beta1,_Resource=ingresses
k8s.io/kubernetes/test/integration/dryrun TestDryRun/extensions/v1beta1,_Resource=networkpolicies
k8s.io/kubernetes/test/integration/dryrun TestDryRun/extensions/v1beta1,_Resource=podsecuritypolicies
k8s.io/kubernetes/test/integration/dryrun TestDryRun/extensions/v1beta1,_Resource=replicasets
k8s.io/kubernetes/test/integration/dryrun TestDryRun/networking.k8s.io/v1,_Resource=networkpolicies
k8s.io/kubernetes/test/integration/dryrun TestDryRun/policy/v1beta1,_Resource=poddisruptionbudgets
k8s.io/kubernetes/test/integration/dryrun TestDryRun/policy/v1beta1,_Resource=podsecuritypolicies
k8s.io/kubernetes/test/integration/dryrun TestDryRun/rbac.authorization.k8s.io/v1,_Resource=clusterrolebindings
k8s.io/kubernetes/test/integration/dryrun TestDryRun/rbac.authorization.k8s.io/v1,_Resource=clusterroles
k8s.io/kubernetes/test/integration/dryrun TestDryRun/rbac.authorization.k8s.io/v1,_Resource=rolebindings
k8s.io/kubernetes/test/integration/dryrun TestDryRun/rbac.authorization.k8s.io/v1,_Resource=roles
k8s.io/kubernetes/test/integration/dryrun TestDryRun/rbac.authorization.k8s.io/v1alpha1,_Resource=clusterrolebindings
k8s.io/kubernetes/test/integration/dryrun TestDryRun/rbac.authorization.k8s.io/v1alpha1,_Resource=clusterroles
k8s.io/kubernetes/test/integration/dryrun TestDryRun/rbac.authorization.k8s.io/v1alpha1,_Resource=rolebindings
k8s.io/kubernetes/test/integration/dryrun TestDryRun/rbac.authorization.k8s.io/v1alpha1,_Resource=roles
k8s.io/kubernetes/test/integration/dryrun TestDryRun/rbac.authorization.k8s.io/v1beta1,_Resource=clusterrolebindings
k8s.io/kubernetes/test/integration/dryrun TestDryRun/rbac.authorization.k8s.io/v1beta1,_Resource=clusterroles
k8s.io/kubernetes/test/integration/dryrun TestDryRun/rbac.authorization.k8s.io/v1beta1,_Resource=rolebindings
k8s.io/kubernetes/test/integration/dryrun TestDryRun/rbac.authorization.k8s.io/v1beta1,_Resource=roles
k8s.io/kubernetes/test/integration/dryrun TestDryRun/scheduling.k8s.io/v1alpha1,_Resource=priorityclasses
k8s.io/kubernetes/test/integration/dryrun TestDryRun/scheduling.k8s.io/v1beta1,_Resource=priorityclasses
k8s.io/kubernetes/test/integration/dryrun TestDryRun/settings.k8s.io/v1alpha1,_Resource=podpresets
k8s.io/kubernetes/test/integration/dryrun TestDryRun/storage.k8s.io/v1,_Resource=storageclasses
k8s.io/kubernetes/test/integration/dryrun TestDryRun/storage.k8s.io/v1,_Resource=volumeattachments
k8s.io/kubernetes/test/integration/dryrun TestDryRun/storage.k8s.io/v1alpha1,_Resource=volumeattachments
k8s.io/kubernetes/test/integration/dryrun TestDryRun/storage.k8s.io/v1beta1,_Resource=storageclasses
k8s.io/kubernetes/test/integration/dryrun TestDryRun/storage.k8s.io/v1beta1,_Resource=volumeattachments
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath//v1,_Resource=configmaps
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath//v1,_Resource=endpoints
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath//v1,_Resource=events
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath//v1,_Resource=limitranges
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath//v1,_Resource=namespaces
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath//v1,_Resource=nodes
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath//v1,_Resource=persistentvolumeclaims
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath//v1,_Resource=persistentvolumes
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath//v1,_Resource=pods
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath//v1,_Resource=podtemplates
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath//v1,_Resource=replicationcontrollers
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath//v1,_Resource=resourcequotas
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath//v1,_Resource=secrets
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath//v1,_Resource=serviceaccounts
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath//v1,_Resource=services
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/admissionregistration.k8s.io/v1alpha1,_Resource=initializerconfigurations
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/admissionregistration.k8s.io/v1beta1,_Resource=mutatingwebhookconfigurations
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/admissionregistration.k8s.io/v1beta1,_Resource=validatingwebhookconfigurations
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apiextensions.k8s.io/v1beta1,_Resource=customresourcedefinitions
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apiregistration.k8s.io/v1,_Resource=apiservices
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apiregistration.k8s.io/v1beta1,_Resource=apiservices
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apps/v1,_Resource=controllerrevisions
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apps/v1,_Resource=daemonsets
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apps/v1,_Resource=deployments
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apps/v1,_Resource=replicasets
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apps/v1,_Resource=statefulsets
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apps/v1beta1,_Resource=controllerrevisions
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apps/v1beta1,_Resource=deployments
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apps/v1beta1,_Resource=statefulsets
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apps/v1beta2,_Resource=controllerrevisions
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apps/v1beta2,_Resource=daemonsets
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apps/v1beta2,_Resource=deployments
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apps/v1beta2,_Resource=replicasets
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/apps/v1beta2,_Resource=statefulsets
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/auditregistration.k8s.io/v1alpha1,_Resource=auditsinks
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/autoscaling/v1,_Resource=horizontalpodautoscalers
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/autoscaling/v2beta1,_Resource=horizontalpodautoscalers
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/autoscaling/v2beta2,_Resource=horizontalpodautoscalers
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/awesome.bears.com/v1,_Resource=pandas
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/awesome.bears.com/v3,_Resource=pandas
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/batch/v1,_Resource=jobs
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/batch/v1beta1,_Resource=cronjobs
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/batch/v2alpha1,_Resource=cronjobs
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/certificates.k8s.io/v1beta1,_Resource=certificatesigningrequests
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/coordination.k8s.io/v1beta1,_Resource=leases
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/cr.bar.com/v1,_Resource=foos
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/custom.fancy.com/v2,_Resource=pants
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/events.k8s.io/v1beta1,_Resource=events
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/extensions/v1beta1,_Resource=daemonsets
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/extensions/v1beta1,_Resource=deployments
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/extensions/v1beta1,_Resource=ingresses
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/extensions/v1beta1,_Resource=networkpolicies
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/extensions/v1beta1,_Resource=podsecuritypolicies
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/extensions/v1beta1,_Resource=replicasets
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/networking.k8s.io/v1,_Resource=networkpolicies
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/policy/v1beta1,_Resource=poddisruptionbudgets
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/policy/v1beta1,_Resource=podsecuritypolicies
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/rbac.authorization.k8s.io/v1,_Resource=clusterrolebindings
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/rbac.authorization.k8s.io/v1,_Resource=clusterroles
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/rbac.authorization.k8s.io/v1,_Resource=rolebindings
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/rbac.authorization.k8s.io/v1,_Resource=roles
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/rbac.authorization.k8s.io/v1alpha1,_Resource=clusterrolebindings
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/rbac.authorization.k8s.io/v1alpha1,_Resource=clusterroles
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/rbac.authorization.k8s.io/v1alpha1,_Resource=rolebindings
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/rbac.authorization.k8s.io/v1alpha1,_Resource=roles
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/rbac.authorization.k8s.io/v1beta1,_Resource=clusterrolebindings
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/rbac.authorization.k8s.io/v1beta1,_Resource=clusterroles
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/rbac.authorization.k8s.io/v1beta1,_Resource=rolebindings
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/rbac.authorization.k8s.io/v1beta1,_Resource=roles
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/scheduling.k8s.io/v1alpha1,_Resource=priorityclasses
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/scheduling.k8s.io/v1beta1,_Resource=priorityclasses
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/settings.k8s.io/v1alpha1,_Resource=podpresets
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/storage.k8s.io/v1,_Resource=storageclasses
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/storage.k8s.io/v1,_Resource=volumeattachments
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/storage.k8s.io/v1alpha1,_Resource=volumeattachments
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/storage.k8s.io/v1beta1,_Resource=storageclasses
k8s.io/kubernetes/test/integration/etcd TestEtcdStoragePath/storage.k8s.io/v1beta1,_Resource=volumeattachments
k8s.io/kubernetes/test/integration/evictions TestConcurrentEvictionRequests
k8s.io/kubernetes/test/integration/evictions TestTerminalPodEviction
k8s.io/kubernetes/test/integration/examples TestAggregatedAPIServer
k8s.io/kubernetes/test/integration/examples TestWebhookLoopback
k8s.io/kubernetes/test/integration/garbagecollector TestBlockingOwnerRefDoesBlock
k8s.io/kubernetes/test/integration/garbagecollector TestCRDDeletionCascading
k8s.io/kubernetes/test/integration/garbagecollector TestCascadingDeletion
k8s.io/kubernetes/test/integration/garbagecollector TestClusterScopedOwners
k8s.io/kubernetes/test/integration/garbagecollector TestCreateWithNonExistentOwner
k8s.io/kubernetes/test/integration/garbagecollector TestCustomResourceCascadingDeletion
k8s.io/kubernetes/test/integration/garbagecollector TestMixedRelationships
k8s.io/kubernetes/test/integration/garbagecollector TestNonBlockingOwnerRefDoesNotBlock
k8s.io/kubernetes/test/integration/garbagecollector TestOrphaning
k8s.io/kubernetes/test/integration/garbagecollector TestSolidOwnerDoesNotBlockWaitingOwner
k8s.io/kubernetes/test/integration/garbagecollector TestStressingCascadingDeletion
k8s.io/kubernetes/test/integration/master TestAccept
k8s.io/kubernetes/test/integration/master TestAppsGroupBackwardCompatibility
k8s.io/kubernetes/test/integration/master TestAppsPrefix
k8s.io/kubernetes/test/integration/master TestAudit
k8s.io/kubernetes/test/integration/master TestAutoscalingGroupBackwardCompatibility
k8s.io/kubernetes/test/integration/master TestAutoscalingPrefix
k8s.io/kubernetes/test/integration/master TestBatchPrefix
k8s.io/kubernetes/test/integration/master TestCRD
k8s.io/kubernetes/test/integration/master TestCRDShadowGroup
k8s.io/kubernetes/test/integration/master TestEmptyList
k8s.io/kubernetes/test/integration/master TestExtensionsPrefix
k8s.io/kubernetes/test/integration/master TestKMSProvider
k8s.io/kubernetes/test/integration/master TestKubernetesService
k8s.io/kubernetes/test/integration/master TestMasterService
k8s.io/kubernetes/test/integration/master TestObjectSizeResponses
k8s.io/kubernetes/test/integration/master TestObjectSizeResponses/1_MB
k8s.io/kubernetes/test/integration/master TestObjectSizeResponses/1_MB#01
k8s.io/kubernetes/test/integration/master TestObjectSizeResponses/1_MB#02
k8s.io/kubernetes/test/integration/master TestObjectSizeResponses/2_MB
k8s.io/kubernetes/test/integration/master TestObjectSizeResponses/2_MB#01
k8s.io/kubernetes/test/integration/master TestOpenAPIDelegationChainPlumbing
k8s.io/kubernetes/test/integration/master TestReconcilerMasterLeaseCombined
k8s.io/kubernetes/test/integration/master TestReconcilerMasterLeaseMultiCombined
k8s.io/kubernetes/test/integration/master TestReconcilerMasterLeaseMultiMoreMasters
k8s.io/kubernetes/test/integration/master TestRun
k8s.io/kubernetes/test/integration/master TestSecretsShouldBeTransformed
k8s.io/kubernetes/test/integration/master TestServiceAlloc
k8s.io/kubernetes/test/integration/master TestStatus
k8s.io/kubernetes/test/integration/master TestWatchSucceedsWithoutArgs
k8s.io/kubernetes/test/integration/metrics TestApiserverMetrics
k8s.io/kubernetes/test/integration/metrics TestMasterProcessMetrics
k8s.io/kubernetes/test/integration/objectmeta TestIgnoreClusterName
k8s.io/kubernetes/test/integration/openshift TestMasterExportsSymbols
k8s.io/kubernetes/test/integration/pods TestPodReadOnlyFilesystem
k8s.io/kubernetes/test/integration/pods TestPodUpdateActiveDeadlineSeconds
k8s.io/kubernetes/test/integration/quota TestQuota
k8s.io/kubernetes/test/integration/quota TestQuotaLimitedResourceDenial
k8s.io/kubernetes/test/integration/replicaset TestAdoption
k8s.io/kubernetes/test/integration/replicaset TestDeletingAndFailedPods
k8s.io/kubernetes/test/integration/replicaset TestExtraPodsAdoptionAndDeletion
k8s.io/kubernetes/test/integration/replicaset TestFullyLabeledReplicas
k8s.io/kubernetes/test/integration/replicaset TestGeneralPodAdoption
k8s.io/kubernetes/test/integration/replicaset TestOverlappingRSs
k8s.io/kubernetes/test/integration/replicaset TestPodOrphaningAndAdoptionWhenLabelsChange
k8s.io/kubernetes/test/integration/replicaset TestRSScaleSubresource
k8s.io/kubernetes/test/integration/replicaset TestRSSelectorImmutability
k8s.io/kubernetes/test/integration/replicaset TestReadyAndAvailableReplicas
k8s.io/kubernetes/test/integration/replicaset TestReplicaSetsAppsV1DefaultGCPolicy
k8s.io/kubernetes/test/integration/replicaset TestReplicaSetsExtensionsV1beta1DefaultGCPolicy
k8s.io/kubernetes/test/integration/replicaset TestSpecReplicasChange
k8s.io/kubernetes/test/integration/replicationcontroller TestAdoption
k8s.io/kubernetes/test/integration/replicationcontroller TestDeletingAndFailedPods
k8s.io/kubernetes/test/integration/replicationcontroller TestExtraPodsAdoptionAndDeletion
k8s.io/kubernetes/test/integration/replicationcontroller TestFullyLabeledReplicas
k8s.io/kubernetes/test/integration/replicationcontroller TestGeneralPodAdoption
k8s.io/kubernetes/test/integration/replicationcontroller TestOverlappingRCs
k8s.io/kubernetes/test/integration/replicationcontroller TestPodOrphaningAndAdoptionWhenLabelsChange
k8s.io/kubernetes/test/integration/replicationcontroller TestRCScaleSubresource
k8s.io/kubernetes/test/integration/replicationcontroller TestReadyAndAvailableReplicas
k8s.io/kubernetes/test/integration/replicationcontroller TestSpecReplicasChange
k8s.io/kubernetes/test/integration/scale TestScaleSubresources
k8s.io/kubernetes/test/integration/scheduler TestAllocatable
k8s.io/kubernetes/test/integration/scheduler TestContextCleanup
k8s.io/kubernetes/test/integration/scheduler TestDisablePreemption
k8s.io/kubernetes/test/integration/scheduler TestImageLocality
k8s.io/kubernetes/test/integration/scheduler TestInterPodAffinity
k8s.io/kubernetes/test/integration/scheduler TestMultiScheduler
k8s.io/kubernetes/test/integration/scheduler TestNodeAffinity
k8s.io/kubernetes/test/integration/scheduler TestNominatedNodeCleanUp
k8s.io/kubernetes/test/integration/scheduler TestPDBInPreemption
k8s.io/kubernetes/test/integration/scheduler TestPVAffinityConflict
k8s.io/kubernetes/test/integration/scheduler TestPodAffinity
k8s.io/kubernetes/test/integration/scheduler TestPrebindPlugin
k8s.io/kubernetes/test/integration/scheduler TestPreemption
k8s.io/kubernetes/test/integration/scheduler TestPreemptionStarvation
k8s.io/kubernetes/test/integration/scheduler TestRescheduleProvisioning
k8s.io/kubernetes/test/integration/scheduler TestReservePlugin
k8s.io/kubernetes/test/integration/scheduler TestSchedulerCreationFromConfigMap
k8s.io/kubernetes/test/integration/scheduler TestSchedulerCreationFromNonExistentConfigMap
k8s.io/kubernetes/test/integration/scheduler TestSchedulerExtender
k8s.io/kubernetes/test/integration/scheduler TestSchedulerInformers
k8s.io/kubernetes/test/integration/scheduler TestTaintNodeByCondition
k8s.io/kubernetes/test/integration/scheduler TestTaintNodeByCondition/disk_pressure_node
k8s.io/kubernetes/test/integration/scheduler TestTaintNodeByCondition/memory_pressure_node
k8s.io/kubernetes/test/integration/scheduler TestTaintNodeByCondition/multi_taints_on_node
k8s.io/kubernetes/test/integration/scheduler TestTaintNodeByCondition/network_unavailable_and_node_is_not_ready
k8s.io/kubernetes/test/integration/scheduler TestTaintNodeByCondition/network_unavailable_and_node_is_ready
k8s.io/kubernetes/test/integration/scheduler TestTaintNodeByCondition/not-ready_node
k8s.io/kubernetes/test/integration/scheduler TestTaintNodeByCondition/pid_pressure_node
k8s.io/kubernetes/test/integration/scheduler TestTaintNodeByCondition/unschedulable_node
k8s.io/kubernetes/test/integration/scheduler TestUnschedulableNodes
k8s.io/kubernetes/test/integration/scheduler TestVolumeBinding
k8s.io/kubernetes/test/integration/scheduler TestVolumeBindingDynamicStressFast
k8s.io/kubernetes/test/integration/scheduler TestVolumeBindingDynamicStressSlow
k8s.io/kubernetes/test/integration/scheduler TestVolumeBindingRescheduling
k8s.io/kubernetes/test/integration/scheduler TestVolumeBindingStress
k8s.io/kubernetes/test/integration/scheduler TestVolumeBindingStressWithSchedulerResync
k8s.io/kubernetes/test/integration/scheduler TestVolumeBindingWithAffinity
k8s.io/kubernetes/test/integration/scheduler TestVolumeBindingWithAntiAffinity
k8s.io/kubernetes/test/integration/scheduler TestVolumeProvision
k8s.io/kubernetes/test/integration/secrets TestSecrets
k8s.io/kubernetes/test/integration/serviceaccount TestServiceAccountAutoCreate
k8s.io/kubernetes/test/integration/serviceaccount TestServiceAccountTokenAuthentication
k8s.io/kubernetes/test/integration/serviceaccount TestServiceAccountTokenAutoCreate
k8s.io/kubernetes/test/integration/serviceaccount TestServiceAccountTokenAutoMount
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/cloud-controller-manager
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/cloud-controller-manager//healthz_without_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/cloud-controller-manager//metrics_without_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/cloud-controller-manager/always-allowed_/metrics_with_BROKEN_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/cloud-controller-manager/authorization_skipped_for_/healthz_with_BROKEN_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/cloud-controller-manager/authorization_skipped_for_/healthz_with_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/cloud-controller-manager/insecurely_/healthz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/cloud-controller-manager/insecurely_/metrics
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/cloud-controller-manager/no-flags
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/cloud-controller-manager/not_authorized_/metrics
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/cloud-controller-manager/not_authorized_/metrics_with_BROKEN_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-controller-manager
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-controller-manager//healthz_without_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-controller-manager//metrics_without_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-controller-manager/always-allowed_/metrics_with_BROKEN_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-controller-manager/authorization_skipped_for_/healthz_with_BROKEN_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-controller-manager/authorization_skipped_for_/healthz_with_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-controller-manager/insecurely_/healthz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-controller-manager/insecurely_/metrics
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-controller-manager/no-flags
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-controller-manager/not_authorized_/metrics
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-controller-manager/not_authorized_/metrics_with_BROKEN_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-scheduler
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-scheduler//healthz_without_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-scheduler//metrics_without_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-scheduler/always-allowed_/metrics_with_BROKEN_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-scheduler/authorization_skipped_for_/healthz_with_BROKEN_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-scheduler/authorization_skipped_for_/healthz_with_authn/authz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-scheduler/insecurely_/healthz
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-scheduler/insecurely_/metrics
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-scheduler/no-flags
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-scheduler/not_authorized_/metrics
k8s.io/kubernetes/test/integration/serving TestComponentSecureServingAndAuth/kube-scheduler/not_authorized_/metrics_with_BROKEN_authn/authz
k8s.io/kubernetes/test/integration/statefulset TestDeletingAndFailedPods
k8s.io/kubernetes/test/integration/statefulset TestSpecReplicasChange
k8s.io/kubernetes/test/integration/storageclasses TestStorageClasses
k8s.io/kubernetes/test/integration/tls TestAPICiphers
k8s.io/kubernetes/test/integration/ttlcontroller TestTTLAnnotations
k8s.io/kubernetes/test/integration/volume TestPVCBoundWithADC
k8s.io/kubernetes/test/integration/volume TestPersistentVolumeBindRace
k8s.io/kubernetes/test/integration/volume TestPersistentVolumeClaimLabelSelector
k8s.io/kubernetes/test/integration/volume TestPersistentVolumeClaimLabelSelectorMatchExpressions
k8s.io/kubernetes/test/integration/volume TestPersistentVolumeControllerStartup
k8s.io/kubernetes/test/integration/volume TestPersistentVolumeDeleter
k8s.io/kubernetes/test/integration/volume TestPersistentVolumeMultiPVs
k8s.io/kubernetes/test/integration/volume TestPersistentVolumeMultiPVsDiffAccessModes
k8s.io/kubernetes/test/integration/volume TestPersistentVolumeMultiPVsPVCs
k8s.io/kubernetes/test/integration/volume TestPersistentVolumeProvisionMultiPVCs
k8s.io/kubernetes/test/integration/volume TestPersistentVolumeRecycler
k8s.io/kubernetes/test/integration/volume TestPodAddedByDswp
k8s.io/kubernetes/test/integration/volume TestPodDeletionWithDswp
k8s.io/kubernetes/test/integration/volume TestPodUpdateWithKeepTerminatedPodVolumes
k8s.io/kubernetes/test/integration/volume TestPodUpdateWithWithADC
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestCRValidationOnCRDUpdate
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestClusterScopedCRUD
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestColumnsPatch
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestCrossNamespaceListWatch
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestCustomResourceUpdateValidation
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestCustomResourceValidation
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestCustomResourceValidationErrors
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestDeRegistrationAndReRegistration
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestDiscovery
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestEtcdStorage
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestFinalization
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestFinalizationAndDeletion
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestForProperValidationErrors
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestForbiddenFieldsInSchema
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestGeneration
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestInternalVersionIsHandlerVersion
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestInvalidObjectMetaInStorage
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestMultipleRegistration
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestMultipleResourceInstances
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestNameConflict
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestNamespaceScopedCRUD
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestNoNamespaceReject
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestPatch
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestPatchCleanTopLevelColumns
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestPostInvalidObjectMeta
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestPreserveInt
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestSameNameDiffNamespace
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestScaleSubresource
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestSelfLink
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestServerUp
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestStatusGetAndPatch
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestStatusSubresource
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestStoragedVersionInClusterScopedCRDStatus
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestStoragedVersionInNamespacedCRDStatus
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestSubresourcePatch
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestSubresourcesDiscovery
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestTableGet
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestValidateOnlyStatus
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestValidationSchemaWithStatus
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestVersionedClusterScopedCRD
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestVersionedNamespacedScopedCRD
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestYAML
k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration TestYAMLSubresource
test-cmd run_RESTMapper_evaluation_tests
test-cmd run_assert_categories_tests
test-cmd run_assert_short_name_tests
test-cmd run_authorization_tests
test-cmd run_certificates_tests
test-cmd run_client_config_tests
test-cmd run_cluster_management_tests
test-cmd run_clusterroles_tests
test-cmd run_cmd_with_img_tests
test-cmd run_configmap_tests
test-cmd run_crd_tests
test-cmd run_create_job_tests
test-cmd run_create_secret_tests
test-cmd run_daemonset_history_tests
test-cmd run_daemonset_tests
test-cmd run_deployment_tests
test-cmd run_impersonation_tests
test-cmd run_job_tests
test-cmd run_kubectl_all_namespace_tests
test-cmd run_kubectl_apply_deployments_tests
test-cmd run_kubectl_apply_tests
test-cmd run_kubectl_config_set_tests
test-cmd run_kubectl_create_error_tests
test-cmd run_kubectl_create_filter_tests
test-cmd run_kubectl_diff_tests
test-cmd run_kubectl_explain_tests
test-cmd run_kubectl_get_tests
test-cmd run_kubectl_local_proxy_tests
test-cmd run_kubectl_old_print_tests
test-cmd run_kubectl_request_timeout_tests
test-cmd run_kubectl_run_tests
test-cmd run_kubectl_sort_by_tests
test-cmd run_kubectl_version_tests
test-cmd run_lists_tests
test-cmd run_multi_resources_tests
test-cmd run_namespace_tests
test-cmd run_nodes_tests
test-cmd run_persistent_volume_claims_tests
test-cmd run_persistent_volumes_tests
test-cmd run_plugins_tests
test-cmd run_pod_templates_tests
test-cmd run_pod_tests
test-cmd run_rc_tests
test-cmd run_recursive_resources_tests
test-cmd run_resource_aliasing_tests
test-cmd run_retrieve_multiple_tests
test-cmd run_role_tests
test-cmd run_rs_tests
test-cmd run_save_config_tests
test-cmd run_secrets_test
test-cmd run_service_accounts_tests
test-cmd run_service_tests
test-cmd run_stateful_set_tests
test-cmd run_statefulset_history_tests
test-cmd run_storage_class_tests
test-cmd run_swagger_tests
test-cmd run_template_output_tests
k8s.io/kubernetes/test/integration/client TestMultiWatch
k8s.io/kubernetes/test/integration/ipamperf TestPerformance
k8s.io/kubernetes/test/integration/master TestUpdateNodeObjects
k8s.io/kubernetes/test/integration/scheduler_perf TestSchedule100Node3KPods
... skipping 10 lines ... I1206 13:03:35.959] process 277 exited with code 0 after 0.3m I1206 13:03:35.959] Call: gcloud config get-value account I1206 13:03:36.317] process 290 exited with code 0 after 0.0m I1206 13:03:36.317] Will upload results to gs://kubernetes-jenkins/logs using pr-kubekins@kubernetes-jenkins-pull.iam.gserviceaccount.com I1206 13:03:36.317] Call: kubectl get -oyaml pods/dae36536-f956-11e8-b720-0a580a6c02d1 W1206 13:03:37.662] The connection to the server localhost:8080 was refused - did you specify the right host or port? E1206 13:03:37.666] Command failed I1206 13:03:37.666] process 303 exited with code 1 after 0.0m E1206 13:03:37.666] unable to upload podspecs: Command '['kubectl', 'get', '-oyaml', 'pods/dae36536-f956-11e8-b720-0a580a6c02d1']' returned non-zero exit status 1 I1206 13:03:37.666] Root: /workspace I1206 13:03:37.666] cd to /workspace I1206 13:03:37.667] Checkout: /workspace/k8s.io/kubernetes master to /workspace/k8s.io/kubernetes I1206 13:03:37.667] Call: git init k8s.io/kubernetes ... skipping 795 lines ... W1206 13:11:39.396] I1206 13:11:39.395869 55730 controllermanager.go:516] Started "serviceaccount" W1206 13:11:39.396] I1206 13:11:39.395975 55730 serviceaccounts_controller.go:115] Starting service account controller W1206 13:11:39.396] I1206 13:11:39.395997 55730 controller_utils.go:1027] Waiting for caches to sync for service account controller W1206 13:11:39.397] I1206 13:11:39.396746 55730 controllermanager.go:516] Started "deployment" W1206 13:11:39.397] I1206 13:11:39.396852 55730 deployment_controller.go:152] Starting deployment controller W1206 13:11:39.397] I1206 13:11:39.396892 55730 controller_utils.go:1027] Waiting for caches to sync for deployment controller W1206 13:11:39.399] E1206 13:11:39.399227 55730 core.go:76] Failed to start service controller: WARNING: no cloud provider provided, services of type LoadBalancer will fail W1206 13:11:39.399] W1206 13:11:39.399260 55730 controllermanager.go:508] Skipping "service" W1206 13:11:39.401] I1206 13:11:39.401134 55730 controllermanager.go:516] Started "pv-protection" W1206 13:11:39.401] I1206 13:11:39.401214 55730 pv_protection_controller.go:81] Starting PV protection controller W1206 13:11:39.401] I1206 13:11:39.401256 55730 controller_utils.go:1027] Waiting for caches to sync for PV protection controller W1206 13:11:39.402] I1206 13:11:39.401666 55730 controllermanager.go:516] Started "job" W1206 13:11:39.402] I1206 13:11:39.401698 55730 job_controller.go:143] Starting job controller ... skipping 33 lines ... 
W1206 13:11:39.413] I1206 13:11:39.411219 55730 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for leases.coordination.k8s.io W1206 13:11:39.413] I1206 13:11:39.411273 55730 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for statefulsets.apps W1206 13:11:39.413] I1206 13:11:39.411339 55730 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for podtemplates W1206 13:11:39.413] I1206 13:11:39.411435 55730 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for controllerrevisions.apps W1206 13:11:39.413] I1206 13:11:39.411485 55730 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for horizontalpodautoscalers.autoscaling W1206 13:11:39.413] I1206 13:11:39.411523 55730 resource_quota_monitor.go:228] QuotaMonitor created object count evaluator for networkpolicies.networking.k8s.io W1206 13:11:39.413] E1206 13:11:39.411565 55730 resource_quota_controller.go:171] initial monitor sync has error: couldn't start monitor for resource "extensions/v1beta1, Resource=networkpolicies": unable to monitor quota for resource "extensions/v1beta1, Resource=networkpolicies" W1206 13:11:39.414] I1206 13:11:39.411589 55730 controllermanager.go:516] Started "resourcequota" W1206 13:11:39.414] W1206 13:11:39.411624 55730 controllermanager.go:508] Skipping "csrsigning" W1206 13:11:39.414] I1206 13:11:39.411628 55730 resource_quota_controller.go:276] Starting resource quota controller W1206 13:11:39.414] W1206 13:11:39.411634 55730 controllermanager.go:508] Skipping "root-ca-cert-publisher" W1206 13:11:39.414] I1206 13:11:39.411643 55730 controller_utils.go:1027] Waiting for caches to sync for resource quota controller W1206 13:11:39.414] I1206 13:11:39.411720 55730 resource_quota_monitor.go:301] QuotaMonitor running ... skipping 12 lines ... W1206 13:11:39.418] I1206 13:11:39.417798 55730 controllermanager.go:516] Started "persistentvolume-expander" W1206 13:11:39.418] I1206 13:11:39.417822 55730 expand_controller.go:153] Starting expand controller W1206 13:11:39.418] I1206 13:11:39.417838 55730 controller_utils.go:1027] Waiting for caches to sync for expand controller W1206 13:11:39.419] I1206 13:11:39.419148 55730 controllermanager.go:516] Started "endpoint" W1206 13:11:39.419] I1206 13:11:39.419345 55730 endpoints_controller.go:149] Starting endpoint controller W1206 13:11:39.419] I1206 13:11:39.419394 55730 controller_utils.go:1027] Waiting for caches to sync for endpoint controller W1206 13:11:39.420] W1206 13:11:39.419443 55730 garbagecollector.go:649] failed to discover preferred resources: the cache has not been filled yet W1206 13:11:39.420] I1206 13:11:39.420252 55730 garbagecollector.go:133] Starting garbage collector controller W1206 13:11:39.420] I1206 13:11:39.420275 55730 controller_utils.go:1027] Waiting for caches to sync for garbage collector controller W1206 13:11:39.421] I1206 13:11:39.420253 55730 controllermanager.go:516] Started "garbagecollector" W1206 13:11:39.421] I1206 13:11:39.420429 55730 graph_builder.go:308] GraphBuilder running W1206 13:11:39.421] I1206 13:11:39.420982 55730 controllermanager.go:516] Started "daemonset" W1206 13:11:39.421] I1206 13:11:39.421001 55730 daemon_controller.go:268] Starting daemon sets controller ... skipping 36 lines ... 
W1206 13:11:39.520] I1206 13:11:39.519636 55730 controller_utils.go:1034] Caches are synced for endpoint controller W1206 13:11:39.523] I1206 13:11:39.523135 55730 controller_utils.go:1034] Caches are synced for HPA controller W1206 13:11:39.524] I1206 13:11:39.524359 55730 controller_utils.go:1034] Caches are synced for stateful set controller W1206 13:11:39.525] I1206 13:11:39.525213 55730 controller_utils.go:1034] Caches are synced for PVC protection controller W1206 13:11:39.531] I1206 13:11:39.531266 55730 controller_utils.go:1034] Caches are synced for namespace controller W1206 13:11:39.533] I1206 13:11:39.533145 55730 controller_utils.go:1034] Caches are synced for ClusterRoleAggregator controller W1206 13:11:39.544] E1206 13:11:39.543787 55730 clusterroleaggregation_controller.go:180] admin failed with : Operation cannot be fulfilled on clusterroles.rbac.authorization.k8s.io "admin": the object has been modified; please apply your changes to the latest version and try again W1206 13:11:39.545] E1206 13:11:39.545161 55730 clusterroleaggregation_controller.go:180] edit failed with : Operation cannot be fulfilled on clusterroles.rbac.authorization.k8s.io "edit": the object has been modified; please apply your changes to the latest version and try again W1206 13:11:39.602] I1206 13:11:39.601497 55730 controller_utils.go:1034] Caches are synced for PV protection controller W1206 13:11:39.604] I1206 13:11:39.604204 55730 controller_utils.go:1034] Caches are synced for persistent volume controller W1206 13:11:39.617] I1206 13:11:39.617403 55730 controller_utils.go:1034] Caches are synced for attach detach controller W1206 13:11:39.618] I1206 13:11:39.618111 55730 controller_utils.go:1034] Caches are synced for expand controller W1206 13:11:39.677] W1206 13:11:39.677137 55730 actual_state_of_world.go:491] Failed to update statusUpdateNeeded field in actual state of world: Failed to set statusUpdateNeeded to needed true, because nodeName="127.0.0.1" does not exist W1206 13:11:39.721] I1206 13:11:39.721203 55730 controller_utils.go:1034] Caches are synced for daemon sets controller W1206 13:11:39.732] I1206 13:11:39.732408 55730 controller_utils.go:1034] Caches are synced for taint controller W1206 13:11:39.733] I1206 13:11:39.732550 55730 taint_manager.go:198] Starting NoExecuteTaintManager W1206 13:11:39.733] I1206 13:11:39.732562 55730 node_lifecycle_controller.go:1222] Initializing eviction metric for zone: W1206 13:11:39.733] I1206 13:11:39.732666 55730 node_lifecycle_controller.go:1072] Controller detected that all Nodes are not-Ready. Entering master disruption mode. W1206 13:11:39.734] I1206 13:11:39.732761 55730 event.go:221] Event(v1.ObjectReference{Kind:"Node", Namespace:"", Name:"127.0.0.1", UID:"77c67b71-f958-11e8-9847-0242ac110002", APIVersion:"", ResourceVersion:"", FieldPath:""}): type: 'Normal' reason: 'RegisteredNode' Node 127.0.0.1 event: Registered Node 127.0.0.1 in Controller ... skipping 34 lines ... 
I1206 13:11:40.692] [m[32mSuccessful: --client --output json has no server info I1206 13:11:40.694] [m+++ [1206 13:11:40] Testing kubectl version: compare json output using additional --short flag I1206 13:11:40.816] [32mSuccessful: --short --output client json info is equal to non short result I1206 13:11:40.821] [m[32mSuccessful: --short --output server json info is equal to non short result I1206 13:11:40.823] [m+++ [1206 13:11:40] Testing kubectl version: compare json output with yaml output W1206 13:11:40.923] I1206 13:11:40.893479 55730 controller_utils.go:1027] Waiting for caches to sync for garbage collector controller W1206 13:11:40.924] E1206 13:11:40.906248 55730 resource_quota_controller.go:437] failed to sync resource monitors: couldn't start monitor for resource "extensions/v1beta1, Resource=networkpolicies": unable to monitor quota for resource "extensions/v1beta1, Resource=networkpolicies" W1206 13:11:40.924] I1206 13:11:40.920490 55730 controller_utils.go:1034] Caches are synced for garbage collector controller W1206 13:11:40.924] I1206 13:11:40.920524 55730 garbagecollector.go:142] Garbage collector: all resource monitors have synced. Proceeding to collect garbage W1206 13:11:40.994] I1206 13:11:40.993841 55730 controller_utils.go:1034] Caches are synced for garbage collector controller I1206 13:11:41.094] [32mSuccessful: --output json/yaml has identical information I1206 13:11:41.095] [m+++ exit code: 0 I1206 13:11:41.095] Recording: run_kubectl_config_set_tests ... skipping 42 lines ... I1206 13:11:43.363] +++ working dir: /go/src/k8s.io/kubernetes I1206 13:11:43.364] +++ command: run_RESTMapper_evaluation_tests I1206 13:11:43.373] +++ [1206 13:11:43] Creating namespace namespace-1544101903-7469 I1206 13:11:43.435] namespace/namespace-1544101903-7469 created I1206 13:11:43.492] Context "test" modified. I1206 13:11:43.496] +++ [1206 13:11:43] Testing RESTMapper I1206 13:11:43.594] +++ [1206 13:11:43] "kubectl get unknownresourcetype" returns error as expected: error: the server doesn't have a resource type "unknownresourcetype" I1206 13:11:43.605] +++ exit code: 0 I1206 13:11:43.702] NAME SHORTNAMES APIGROUP NAMESPACED KIND I1206 13:11:43.702] bindings true Binding I1206 13:11:43.702] componentstatuses cs false ComponentStatus I1206 13:11:43.703] configmaps cm true ConfigMap I1206 13:11:43.703] endpoints ep true Endpoints ... skipping 606 lines ... I1206 13:12:00.757] [mpoddisruptionbudget.policy/test-pdb-3 created I1206 13:12:00.836] [32mcore.sh:251: Successful get pdb/test-pdb-3 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 2 I1206 13:12:00.897] [mpoddisruptionbudget.policy/test-pdb-4 created I1206 13:12:00.978] [32mcore.sh:255: Successful get pdb/test-pdb-4 --namespace=test-kubectl-describe-pod {{.spec.maxUnavailable}}: 50% I1206 13:12:01.114] [m[32mcore.sh:261: Successful get pods --namespace=test-kubectl-describe-pod {{range.items}}{{.metadata.name}}:{{end}}: I1206 13:12:01.271] [mpod/env-test-pod created W1206 13:12:01.371] error: resource(s) were provided, but no name, label selector, or --all flag specified W1206 13:12:01.371] error: setting 'all' parameter but found a non empty selector. W1206 13:12:01.372] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. 
W1206 13:12:01.372] I1206 13:12:00.470492 52364 controller.go:608] quota admission added evaluator for: poddisruptionbudgets.policy
W1206 13:12:01.372] error: min-available and max-unavailable cannot be both specified
I1206 13:12:01.472] core.sh:264: Successful describe pods --namespace=test-kubectl-describe-pod env-test-pod:
I1206 13:12:01.473] Name: env-test-pod
I1206 13:12:01.473] Namespace: test-kubectl-describe-pod
I1206 13:12:01.473] Priority: 0
I1206 13:12:01.473] PriorityClassName: <none>
I1206 13:12:01.473] Node: <none>
... skipping 145 lines ...
W1206 13:12:12.562] I1206 13:12:12.059747 55730 namespace_controller.go:171] Namespace has been deleted test-kubectl-describe-pod
W1206 13:12:12.562] I1206 13:12:12.154490 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544101927-25931", Name:"modified", UID:"8b220857-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"367", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: modified-mg2b9
I1206 13:12:12.681] core.sh:434: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:12:12.810] pod/valid-pod created
I1206 13:12:12.895] core.sh:438: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1206 13:12:13.028] Successful
I1206 13:12:13.029] message:Error from server: cannot restore map from string
I1206 13:12:13.029] has:cannot restore map from string
I1206 13:12:13.101] Successful
I1206 13:12:13.102] message:pod/valid-pod patched (no change)
I1206 13:12:13.102] has:patched (no change)
I1206 13:12:13.173] pod/valid-pod patched
I1206 13:12:13.259] core.sh:455: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: nginx:
... skipping 5 lines ...
I1206 13:12:13.725] pod/valid-pod patched
I1206 13:12:13.813] core.sh:470: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: changed-with-yaml:
I1206 13:12:13.879] pod/valid-pod patched
I1206 13:12:13.963] core.sh:475: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:3.1:
I1206 13:12:14.102] pod/valid-pod patched
I1206 13:12:14.188] core.sh:491: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: nginx:
I1206 13:12:14.336] +++ [1206 13:12:14] "kubectl patch with resourceVersion 486" returns error as expected: Error from server (Conflict): Operation cannot be fulfilled on pods "valid-pod": the object has been modified; please apply your changes to the latest version and try again
W1206 13:12:14.437] E1206 13:12:13.022704 52364 status.go:64] apiserver received an error that is not an metav1.Status: &errors.errorString{s:"cannot restore map from string"}
I1206 13:12:14.547] pod "valid-pod" deleted
I1206 13:12:14.556] pod/valid-pod replaced
I1206 13:12:14.643] core.sh:515: Successful get pod valid-pod {{(index .spec.containers 0).name}}: replaced-k8s-serve-hostname
I1206 13:12:14.776] Successful
I1206 13:12:14.777] message:error: --grace-period must have --force specified
I1206 13:12:14.777] has:\-\-grace-period must have \-\-force specified
I1206 13:12:14.904] Successful
I1206 13:12:14.905] message:error: --timeout must have --force specified
I1206 13:12:14.905] has:\-\-timeout must have \-\-force specified
W1206 13:12:15.032] W1206 13:12:15.032175 55730 actual_state_of_world.go:491] Failed to update statusUpdateNeeded field in actual state of world: Failed to set statusUpdateNeeded to needed true, because nodeName="node-v1-test" does not exist
I1206 13:12:15.133] node/node-v1-test created
I1206 13:12:15.170] node/node-v1-test replaced
I1206 13:12:15.255] core.sh:552: Successful get node node-v1-test {{.metadata.annotations.a}}: b
I1206 13:12:15.325] node "node-v1-test" deleted
I1206 13:12:15.414] core.sh:559: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: nginx:
I1206 13:12:15.653] core.sh:562: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: k8s.gcr.io/serve_hostname:
... skipping 58 lines ...
I1206 13:12:20.028] save-config.sh:31: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:12:20.158] pod/test-pod created
W1206 13:12:20.259] Edit cancelled, no changes made.
W1206 13:12:20.259] Edit cancelled, no changes made.
W1206 13:12:20.259] Edit cancelled, no changes made.
W1206 13:12:20.259] Edit cancelled, no changes made.
W1206 13:12:20.259] error: 'name' already has a value (valid-pod), and --overwrite is false
W1206 13:12:20.259] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W1206 13:12:20.260] Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
I1206 13:12:20.360] pod "test-pod" deleted
I1206 13:12:20.360] +++ [1206 13:12:20] Creating namespace namespace-1544101940-5506
I1206 13:12:20.378] namespace/namespace-1544101940-5506 created
I1206 13:12:20.441] Context "test" modified.
... skipping 41 lines ...
I1206 13:12:23.156] +++ Running case: test-cmd.run_kubectl_create_error_tests
I1206 13:12:23.158] +++ working dir: /go/src/k8s.io/kubernetes
I1206 13:12:23.160] +++ command: run_kubectl_create_error_tests
I1206 13:12:23.170] +++ [1206 13:12:23] Creating namespace namespace-1544101943-11292
I1206 13:12:23.235] namespace/namespace-1544101943-11292 created
I1206 13:12:23.301] Context "test" modified.
I1206 13:12:23.306] +++ [1206 13:12:23] Testing kubectl create with error
W1206 13:12:23.407] Error: required flag(s) "filename" not set
W1206 13:12:23.407] 
W1206 13:12:23.407] 
W1206 13:12:23.407] Examples:
W1206 13:12:23.407] # Create a pod using the data in pod.json.
W1206 13:12:23.407] kubectl create -f ./pod.json
W1206 13:12:23.407] 
... skipping 38 lines ...
W1206 13:12:23.413] kubectl create -f FILENAME [options]
W1206 13:12:23.413] 
W1206 13:12:23.413] Use "kubectl <command> --help" for more information about a given command.
W1206 13:12:23.414] Use "kubectl options" for a list of global command-line options (applies to all commands).
W1206 13:12:23.414] 
W1206 13:12:23.414] required flag(s) "filename" not set
I1206 13:12:23.514] +++ [1206 13:12:23] "kubectl create with empty string list returns error as expected: error: error validating "hack/testdata/invalid-rc-with-empty-args.yaml": error validating data: ValidationError(ReplicationController.spec.template.spec.containers[0].args): unknown object type "nil" in ReplicationController.spec.template.spec.containers[0].args[0]; if you choose to ignore these errors, turn validation off with --validate=false
W1206 13:12:23.615] kubectl convert is DEPRECATED and will be removed in a future version.
W1206 13:12:23.615] In order to convert, kubectl apply the object to the cluster, then kubectl get at the desired version.
I1206 13:12:23.716] +++ exit code: 0
I1206 13:12:23.716] Recording: run_kubectl_apply_tests
I1206 13:12:23.716] Running command: run_kubectl_apply_tests
I1206 13:12:23.716] 
... skipping 13 lines ...
I1206 13:12:24.587] apply.sh:47: Successful get deployments {{range.items}}{{.metadata.name}}{{end}}: test-deployment-retainkeys
I1206 13:12:25.383] deployment.extensions "test-deployment-retainkeys" deleted
I1206 13:12:25.466] apply.sh:67: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:12:25.598] pod/selector-test-pod created
I1206 13:12:25.685] apply.sh:71: Successful get pods selector-test-pod {{.metadata.labels.name}}: selector-test-pod
I1206 13:12:25.761] Successful
I1206 13:12:25.762] message:Error from server (NotFound): pods "selector-test-pod-dont-apply" not found
I1206 13:12:25.762] has:pods "selector-test-pod-dont-apply" not found
I1206 13:12:25.832] pod "selector-test-pod" deleted
I1206 13:12:25.917] apply.sh:80: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:12:26.108] pod/test-pod created (server dry run)
I1206 13:12:26.190] apply.sh:85: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:12:26.328] pod/test-pod created
... skipping 12 lines ...
W1206 13:12:26.984] I1206 13:12:26.983470 52364 clientconn.go:551] parsed scheme: ""
W1206 13:12:26.984] I1206 13:12:26.983504 52364 clientconn.go:557] scheme "" not registered, fallback to default scheme
W1206 13:12:26.985] I1206 13:12:26.983539 52364 resolver_conn_wrapper.go:116] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0 <nil>}]
W1206 13:12:26.985] I1206 13:12:26.983570 52364 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W1206 13:12:26.985] I1206 13:12:26.984042 52364 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W1206 13:12:27.055] I1206 13:12:27.054997 52364 controller.go:608] quota admission added evaluator for: resources.mygroup.example.com
W1206 13:12:27.133] Error from server (NotFound): resources.mygroup.example.com "myobj" not found
I1206 13:12:27.233] kind.mygroup.example.com/myobj created (server dry run)
I1206 13:12:27.234] customresourcedefinition.apiextensions.k8s.io "resources.mygroup.example.com" deleted
I1206 13:12:27.291] apply.sh:129: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:12:27.428] pod/a created
I1206 13:12:28.918] apply.sh:134: Successful get pods a {{.metadata.name}}: a
I1206 13:12:28.993] Successful
I1206 13:12:28.994] message:Error from server (NotFound): pods "b" not found
I1206 13:12:28.994] has:pods "b" not found
I1206 13:12:29.133] pod/b created
I1206 13:12:29.145] pod/a pruned
I1206 13:12:30.822] apply.sh:142: Successful get pods b {{.metadata.name}}: b
I1206 13:12:30.896] Successful
I1206 13:12:30.896] message:Error from server (NotFound): pods "a" not found
I1206 13:12:30.896] has:pods "a" not found
I1206 13:12:30.966] pod "b" deleted
I1206 13:12:31.048] apply.sh:152: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:12:31.193] pod/a created
I1206 13:12:31.278] apply.sh:157: Successful get pods a {{.metadata.name}}: a
I1206 13:12:31.352] Successful
I1206 13:12:31.352] message:Error from server (NotFound): pods "b" not found
I1206 13:12:31.352] has:pods "b" not found
I1206 13:12:31.490] pod/b created
I1206 13:12:31.574] apply.sh:165: Successful get pods a {{.metadata.name}}: a
I1206 13:12:31.652] apply.sh:166: Successful get pods b {{.metadata.name}}: b
I1206 13:12:31.722] pod "a" deleted
I1206 13:12:31.726] pod "b" deleted
I1206 13:12:31.868] Successful
I1206 13:12:31.868] message:error: all resources selected for prune without explicitly passing --all. To prune all resources, pass the --all flag. If you did not mean to prune all resources, specify a label selector
I1206 13:12:31.869] has:all resources selected for prune without explicitly passing --all
I1206 13:12:32.004] pod/a created
I1206 13:12:32.011] pod/b created
I1206 13:12:32.017] service/prune-svc created
I1206 13:12:33.504] apply.sh:178: Successful get pods a {{.metadata.name}}: a
I1206 13:12:33.582] apply.sh:179: Successful get pods b {{.metadata.name}}: b
... skipping 126 lines ...
I1206 13:12:45.364] Context "test" modified.
I1206 13:12:45.369] +++ [1206 13:12:45] Testing kubectl create filter
I1206 13:12:45.449] create.sh:30: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:12:45.585] pod/selector-test-pod created
I1206 13:12:45.667] create.sh:34: Successful get pods selector-test-pod {{.metadata.labels.name}}: selector-test-pod
I1206 13:12:45.742] Successful
I1206 13:12:45.743] message:Error from server (NotFound): pods "selector-test-pod-dont-apply" not found
I1206 13:12:45.743] has:pods "selector-test-pod-dont-apply" not found
I1206 13:12:45.813] pod "selector-test-pod" deleted
I1206 13:12:45.830] +++ exit code: 0
I1206 13:12:45.865] Recording: run_kubectl_apply_deployments_tests
I1206 13:12:45.865] Running command: run_kubectl_apply_deployments_tests
I1206 13:12:45.882] 
... skipping 28 lines ...
I1206 13:12:47.628] apps.sh:138: Successful get replicasets {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:12:47.706] apps.sh:139: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:12:47.785] apps.sh:143: Successful get deployments {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:12:47.922] deployment.extensions/nginx created
I1206 13:12:48.009] apps.sh:147: Successful get deployment nginx {{.metadata.name}}: nginx
I1206 13:12:52.188] Successful
I1206 13:12:52.188] message:Error from server (Conflict): error when applying patch:
I1206 13:12:52.188] {"metadata":{"annotations":{"kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"extensions/v1beta1\",\"kind\":\"Deployment\",\"metadata\":{\"annotations\":{},\"labels\":{\"name\":\"nginx\"},\"name\":\"nginx\",\"namespace\":\"namespace-1544101965-21293\",\"resourceVersion\":\"99\"},\"spec\":{\"replicas\":3,\"selector\":{\"matchLabels\":{\"name\":\"nginx2\"}},\"template\":{\"metadata\":{\"labels\":{\"name\":\"nginx2\"}},\"spec\":{\"containers\":[{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"nginx\",\"ports\":[{\"containerPort\":80}]}]}}}}\n"},"resourceVersion":"99"},"spec":{"selector":{"matchLabels":{"name":"nginx2"}},"template":{"metadata":{"labels":{"name":"nginx2"}}}}}
I1206 13:12:52.188] to:
I1206 13:12:52.188] Resource: "extensions/v1beta1, Resource=deployments", GroupVersionKind: "extensions/v1beta1, Kind=Deployment"
I1206 13:12:52.189] Name: "nginx", Namespace: "namespace-1544101965-21293"
I1206 13:12:52.190] Object: &{map["kind":"Deployment" "apiVersion":"extensions/v1beta1" "metadata":map["namespace":"namespace-1544101965-21293" "uid":"a073fa15-f958-11e8-9847-0242ac110002" "annotations":map["deployment.kubernetes.io/revision":"1" "kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"extensions/v1beta1\",\"kind\":\"Deployment\",\"metadata\":{\"annotations\":{},\"labels\":{\"name\":\"nginx\"},\"name\":\"nginx\",\"namespace\":\"namespace-1544101965-21293\"},\"spec\":{\"replicas\":3,\"template\":{\"metadata\":{\"labels\":{\"name\":\"nginx1\"}},\"spec\":{\"containers\":[{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"nginx\",\"ports\":[{\"containerPort\":80}]}]}}}}\n"] "name":"nginx" "selfLink":"/apis/extensions/v1beta1/namespaces/namespace-1544101965-21293/deployments/nginx" "resourceVersion":"703" "generation":'\x01' "creationTimestamp":"2018-12-06T13:12:47Z" "labels":map["name":"nginx"]] "spec":map["replicas":'\x03' "selector":map["matchLabels":map["name":"nginx1"]] "template":map["metadata":map["creationTimestamp":<nil> "labels":map["name":"nginx1"]] "spec":map["containers":[map["terminationMessagePath":"/dev/termination-log" "terminationMessagePolicy":"File" "imagePullPolicy":"IfNotPresent" "name":"nginx" "image":"k8s.gcr.io/nginx:test-cmd" "ports":[map["containerPort":'P' "protocol":"TCP"]] "resources":map[]]] "restartPolicy":"Always" "terminationGracePeriodSeconds":'\x1e' "dnsPolicy":"ClusterFirst" "securityContext":map[] "schedulerName":"default-scheduler"]] "strategy":map["type":"RollingUpdate" "rollingUpdate":map["maxUnavailable":'\x01' "maxSurge":'\x01']] "revisionHistoryLimit":%!q(int64=+2147483647) "progressDeadlineSeconds":%!q(int64=+2147483647)] "status":map["observedGeneration":'\x01' "replicas":'\x03' "updatedReplicas":'\x03' "unavailableReplicas":'\x03' "conditions":[map["status":"False" "lastUpdateTime":"2018-12-06T13:12:47Z" "lastTransitionTime":"2018-12-06T13:12:47Z" "reason":"MinimumReplicasUnavailable" "message":"Deployment does not have minimum availability." "type":"Available"]]]]}
I1206 13:12:52.190] for: "hack/testdata/deployment-label-change2.yaml": Operation cannot be fulfilled on deployments.extensions "nginx": the object has been modified; please apply your changes to the latest version and try again
I1206 13:12:52.190] has:Error from server (Conflict)
W1206 13:12:52.290] kubectl run --generator=job/v1 is DEPRECATED and will be removed in a future version. Use kubectl run --generator=run-pod/v1 or kubectl create instead.
W1206 13:12:52.291] I1206 13:12:43.560846 52364 controller.go:608] quota admission added evaluator for: jobs.batch
W1206 13:12:52.291] I1206 13:12:43.572825 55730 event.go:221] Event(v1.ObjectReference{Kind:"Job", Namespace:"namespace-1544101963-17999", Name:"pi", UID:"9ddae57c-f958-11e8-9847-0242ac110002", APIVersion:"batch/v1", ResourceVersion:"603", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: pi-8lvwl
W1206 13:12:52.291] kubectl run --generator=deployment/apps.v1 is DEPRECATED and will be removed in a future version. Use kubectl run --generator=run-pod/v1 or kubectl create instead.
W1206 13:12:52.291] I1206 13:12:44.069473 52364 controller.go:608] quota admission added evaluator for: deployments.apps
W1206 13:12:52.292] I1206 13:12:44.074983 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544101963-17999", Name:"nginx-extensions", UID:"9e288270-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"610", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-extensions-6fb4b564f5 to 1
... skipping 18 lines ...
I1206 13:12:57.454] "name": "nginx2"
I1206 13:12:57.454] has:"name": "nginx2"
W1206 13:12:57.555] I1206 13:12:57.376635 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544101965-21293", Name:"nginx", UID:"a61646aa-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"725", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-7777658b9d to 3
W1206 13:12:57.555] I1206 13:12:57.379224 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544101965-21293", Name:"nginx-7777658b9d", UID:"a616c270-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"726", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-7777658b9d-sk5k9
W1206 13:12:57.556] I1206 13:12:57.381221 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544101965-21293", Name:"nginx-7777658b9d", UID:"a616c270-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"726", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-7777658b9d-fcd5j
W1206 13:12:57.556] I1206 13:12:57.381525 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544101965-21293", Name:"nginx-7777658b9d", UID:"a616c270-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"726", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-7777658b9d-j96rd
W1206 13:13:01.644] E1206 13:13:01.643373 55730 replica_set.go:450] Sync "namespace-1544101965-21293/nginx-7777658b9d" failed with Operation cannot be fulfilled on replicasets.apps "nginx-7777658b9d": StorageError: invalid object, Code: 4, Key: /registry/replicasets/namespace-1544101965-21293/nginx-7777658b9d, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: a616c270-f958-11e8-9847-0242ac110002, UID in object meta:
W1206 13:13:02.637] I1206 13:13:02.636243 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544101965-21293", Name:"nginx", UID:"a938ac37-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"758", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-7777658b9d to 3
W1206 13:13:02.640] I1206 13:13:02.639569 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544101965-21293", Name:"nginx-7777658b9d", UID:"a93940a9-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"759", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-7777658b9d-fccwp
W1206 13:13:02.642] I1206 13:13:02.641664 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544101965-21293", Name:"nginx-7777658b9d", UID:"a93940a9-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"759", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-7777658b9d-c7jk2
W1206 13:13:02.642] I1206 13:13:02.642527 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544101965-21293", Name:"nginx-7777658b9d", UID:"a93940a9-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"759", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-7777658b9d-8zt5t
I1206 13:13:02.743] Successful
I1206 13:13:02.743] message:The Deployment "nginx" is invalid: spec.template.metadata.labels: Invalid value: map[string]string{"name":"nginx3"}: `selector` does not match template `labels`
... skipping 73 lines ...
I1206 13:13:03.794] +++ [1206 13:13:03] Creating namespace namespace-1544101983-25189
I1206 13:13:03.861] namespace/namespace-1544101983-25189 created
I1206 13:13:03.924] Context "test" modified.
I1206 13:13:03.929] +++ [1206 13:13:03] Testing kubectl get
I1206 13:13:04.012] get.sh:29: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:13:04.089] Successful
I1206 13:13:04.090] message:Error from server (NotFound): pods "abc" not found
I1206 13:13:04.090] has:pods "abc" not found
I1206 13:13:04.174] get.sh:37: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:13:04.252] Successful
I1206 13:13:04.252] message:Error from server (NotFound): pods "abc" not found
I1206 13:13:04.252] has:pods "abc" not found
I1206 13:13:04.333] get.sh:45: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:13:04.415] Successful
I1206 13:13:04.415] message:{
I1206 13:13:04.416]   "apiVersion": "v1",
I1206 13:13:04.416]   "items": [],
... skipping 23 lines ...
I1206 13:13:04.731] has not:No resources found
I1206 13:13:04.808] Successful
I1206 13:13:04.809] message:NAME
I1206 13:13:04.809] has not:No resources found
I1206 13:13:04.889] get.sh:73: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:13:04.997] Successful
I1206 13:13:04.997] message:error: the server doesn't have a resource type "foobar"
I1206 13:13:04.997] has not:No resources found
I1206 13:13:05.075] Successful
I1206 13:13:05.075] message:No resources found.
I1206 13:13:05.076] has:No resources found
I1206 13:13:05.153] Successful
I1206 13:13:05.154] message:
I1206 13:13:05.154] has not:No resources found
I1206 13:13:05.232] Successful
I1206 13:13:05.232] message:No resources found.
I1206 13:13:05.232] has:No resources found
I1206 13:13:05.318] get.sh:93: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:13:05.397] Successful
I1206 13:13:05.398] message:Error from server (NotFound): pods "abc" not found
I1206 13:13:05.398] has:pods "abc" not found
I1206 13:13:05.399] FAIL!
I1206 13:13:05.399] message:Error from server (NotFound): pods "abc" not found
I1206 13:13:05.400] has not:List
I1206 13:13:05.400] 99 /go/src/k8s.io/kubernetes/test/cmd/../../test/cmd/get.sh
I1206 13:13:05.511] Successful
I1206 13:13:05.511] message:I1206 13:13:05.459731 68233 loader.go:359] Config loaded from file /tmp/tmp.fv7kmhQ2qX/.kube/config
I1206 13:13:05.511] I1206 13:13:05.460693 68233 loader.go:359] Config loaded from file /tmp/tmp.fv7kmhQ2qX/.kube/config
I1206 13:13:05.511] I1206 13:13:05.462221 68233 round_trippers.go:438] GET http://127.0.0.1:8080/version?timeout=32s 200 OK in 1 milliseconds
... skipping 995 lines ...
I1206 13:13:08.894] }
I1206 13:13:08.967] get.sh:155: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1206 13:13:09.186] <no value>Successful
I1206 13:13:09.186] message:valid-pod:
I1206 13:13:09.186] has:valid-pod:
I1206 13:13:09.260] Successful
I1206 13:13:09.261] message:error: error executing jsonpath "{.missing}": Error executing template: missing is not found.
Printing more information for debugging the template:
I1206 13:13:09.261] template was:
I1206 13:13:09.261] {.missing}
I1206 13:13:09.261] object given to jsonpath engine was:
I1206 13:13:09.262] map[string]interface {}{"kind":"Pod", "apiVersion":"v1", "metadata":map[string]interface {}{"selfLink":"/api/v1/namespaces/namespace-1544101988-25130/pods/valid-pod", "uid":"ace7c368-f958-11e8-9847-0242ac110002", "resourceVersion":"795", "creationTimestamp":"2018-12-06T13:13:08Z", "labels":map[string]interface {}{"name":"valid-pod"}, "name":"valid-pod", "namespace":"namespace-1544101988-25130"}, "spec":map[string]interface {}{"containers":[]interface {}{map[string]interface {}{"terminationMessagePath":"/dev/termination-log", "terminationMessagePolicy":"File", "imagePullPolicy":"Always", "name":"kubernetes-serve-hostname", "image":"k8s.gcr.io/serve_hostname", "resources":map[string]interface {}{"limits":map[string]interface {}{"cpu":"1", "memory":"512Mi"}, "requests":map[string]interface {}{"cpu":"1", "memory":"512Mi"}}}}, "restartPolicy":"Always", "terminationGracePeriodSeconds":30, "dnsPolicy":"ClusterFirst", "securityContext":map[string]interface {}{}, "schedulerName":"default-scheduler", "priority":0, "enableServiceLinks":true}, "status":map[string]interface {}{"phase":"Pending", "qosClass":"Guaranteed"}}
I1206 13:13:09.262] has:missing is not found
I1206 13:13:09.337] Successful
I1206 13:13:09.337] message:Error executing template: template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing". Printing more information for debugging the template:
I1206 13:13:09.337] template was:
I1206 13:13:09.337] {{.missing}}
I1206 13:13:09.337] raw data was:
I1206 13:13:09.338] {"apiVersion":"v1","kind":"Pod","metadata":{"creationTimestamp":"2018-12-06T13:13:08Z","labels":{"name":"valid-pod"},"name":"valid-pod","namespace":"namespace-1544101988-25130","resourceVersion":"795","selfLink":"/api/v1/namespaces/namespace-1544101988-25130/pods/valid-pod","uid":"ace7c368-f958-11e8-9847-0242ac110002"},"spec":{"containers":[{"image":"k8s.gcr.io/serve_hostname","imagePullPolicy":"Always","name":"kubernetes-serve-hostname","resources":{"limits":{"cpu":"1","memory":"512Mi"},"requests":{"cpu":"1","memory":"512Mi"}},"terminationMessagePath":"/dev/termination-log","terminationMessagePolicy":"File"}],"dnsPolicy":"ClusterFirst","enableServiceLinks":true,"priority":0,"restartPolicy":"Always","schedulerName":"default-scheduler","securityContext":{},"terminationGracePeriodSeconds":30},"status":{"phase":"Pending","qosClass":"Guaranteed"}}
I1206 13:13:09.338] object given to template engine was:
I1206 13:13:09.338] map[apiVersion:v1 kind:Pod metadata:map[uid:ace7c368-f958-11e8-9847-0242ac110002 creationTimestamp:2018-12-06T13:13:08Z labels:map[name:valid-pod] name:valid-pod namespace:namespace-1544101988-25130 resourceVersion:795 selfLink:/api/v1/namespaces/namespace-1544101988-25130/pods/valid-pod] spec:map[priority:0 restartPolicy:Always schedulerName:default-scheduler securityContext:map[] terminationGracePeriodSeconds:30 containers:[map[image:k8s.gcr.io/serve_hostname imagePullPolicy:Always name:kubernetes-serve-hostname resources:map[limits:map[cpu:1 memory:512Mi] requests:map[cpu:1 memory:512Mi]] terminationMessagePath:/dev/termination-log terminationMessagePolicy:File]] dnsPolicy:ClusterFirst enableServiceLinks:true] status:map[phase:Pending qosClass:Guaranteed]]
I1206 13:13:09.338] has:map has no entry for key "missing"
W1206 13:13:09.439] error: error executing template "{{.missing}}": template: output:1:2: executing "output" at <.missing>: map has no entry for key "missing"
W1206 13:13:10.408] E1206 13:13:10.407494 68625 streamwatcher.go:109] Unable to decode an event from the watch stream: net/http: request canceled (Client.Timeout exceeded while reading body)
I1206 13:13:10.508] Successful
I1206 13:13:10.509] message:NAME        READY   STATUS    RESTARTS   AGE
I1206 13:13:10.509] valid-pod   0/1     Pending   0          1s
I1206 13:13:10.509] has:STATUS
I1206 13:13:10.509] Successful
... skipping 80 lines ...
I1206 13:13:12.670] terminationGracePeriodSeconds: 30
I1206 13:13:12.670] status:
I1206 13:13:12.670] phase: Pending
I1206 13:13:12.670] qosClass: Guaranteed
I1206 13:13:12.670] has:name: valid-pod
I1206 13:13:12.670] Successful
I1206 13:13:12.670] message:Error from server (NotFound): pods "invalid-pod" not found
I1206 13:13:12.670] has:"invalid-pod" not found
I1206 13:13:12.716] pod "valid-pod" deleted
I1206 13:13:12.800] get.sh:193: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:13:12.944] pod/redis-master created
I1206 13:13:12.948] pod/valid-pod created
I1206 13:13:13.028] Successful
... skipping 284 lines ...
I1206 13:13:15.654] message:NAME
I1206 13:13:15.654] sample-role
I1206 13:13:15.654] has:NAME
I1206 13:13:15.655] sample-role
W1206 13:13:15.755] kubectl run --generator=job/v1 is DEPRECATED and will be removed in a future version. Use kubectl run --generator=run-pod/v1 or kubectl create instead.
W1206 13:13:15.756] I1206 13:13:15.124869 55730 event.go:221] Event(v1.ObjectReference{Kind:"Job", Namespace:"namespace-1544101993-18927", Name:"pi", UID:"b0aa606b-f958-11e8-9847-0242ac110002", APIVersion:"batch/v1", ResourceVersion:"831", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: pi-bv7qt
W1206 13:13:15.798] E1206 13:13:15.797581 52364 autoregister_controller.go:190] v1.company.com failed with : apiservices.apiregistration.k8s.io "v1.company.com" already exists
I1206 13:13:15.898] customresourcedefinition.apiextensions.k8s.io/foos.company.com created
I1206 13:13:15.899] old-print.sh:120: Successful get customresourcedefinitions {{range.items}}{{if eq .metadata.name \"foos.company.com\"}}{{.metadata.name}}:{{end}}{{end}}: foos.company.com:
I1206 13:13:15.988] old-print.sh:123: Successful get foos {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:13:16.135] Successful
I1206 13:13:16.136] message:
I1206 13:13:16.136] has:
... skipping 9 lines ...
I1206 13:13:16.736] Running command: run_create_secret_tests
I1206 13:13:16.753] 
I1206 13:13:16.755] +++ Running case: test-cmd.run_create_secret_tests
I1206 13:13:16.757] +++ working dir: /go/src/k8s.io/kubernetes
I1206 13:13:16.758] +++ command: run_create_secret_tests
I1206 13:13:16.839] Successful
I1206 13:13:16.839] message:Error from server (NotFound): secrets "mysecret" not found
I1206 13:13:16.839] has:secrets "mysecret" not found
W1206 13:13:16.940] I1206 13:13:15.977611 52364 clientconn.go:551] parsed scheme: ""
W1206 13:13:16.940] I1206 13:13:15.977641 52364 clientconn.go:557] scheme "" not registered, fallback to default scheme
W1206 13:13:16.940] I1206 13:13:15.977698 52364 resolver_conn_wrapper.go:116] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0 <nil>}]
W1206 13:13:16.940] I1206 13:13:15.977737 52364 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W1206 13:13:16.940] I1206 13:13:15.978111 52364 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W1206 13:13:16.941] No resources found.
W1206 13:13:16.941] No resources found.
I1206 13:13:17.041] Successful
I1206 13:13:17.041] message:Error from server (NotFound): secrets "mysecret" not found
I1206 13:13:17.041] has:secrets "mysecret" not found
I1206 13:13:17.042] Successful
I1206 13:13:17.042] message:user-specified
I1206 13:13:17.042] has:user-specified
I1206 13:13:17.045] Successful
I1206 13:13:17.113] {"kind":"ConfigMap","apiVersion":"v1","metadata":{"name":"tester-create-cm","namespace":"default","selfLink":"/api/v1/namespaces/default/configmaps/tester-create-cm","uid":"b1d9eb3a-f958-11e8-9847-0242ac110002","resourceVersion":"868","creationTimestamp":"2018-12-06T13:13:17Z"}}
... skipping 80 lines ...
I1206 13:13:18.913] has:Timeout exceeded while reading body
I1206 13:13:18.989] Successful
I1206 13:13:18.990] message:NAME        READY   STATUS    RESTARTS   AGE
I1206 13:13:18.990] valid-pod   0/1     Pending   0          1s
I1206 13:13:18.990] has:valid-pod
I1206 13:13:19.052] Successful
I1206 13:13:19.053] message:error: Invalid timeout value. Timeout must be a single integer in seconds, or an integer followed by a corresponding time unit (e.g. 1s | 2m | 3h)
I1206 13:13:19.053] has:Invalid timeout value
I1206 13:13:19.124] pod "valid-pod" deleted
I1206 13:13:19.143] +++ exit code: 0
I1206 13:13:19.172] Recording: run_crd_tests
I1206 13:13:19.173] Running command: run_crd_tests
I1206 13:13:19.190] 
... skipping 166 lines ...
I1206 13:13:23.058] foo.company.com/test patched
I1206 13:13:23.144] crd.sh:237: Successful get foos/test {{.patched}}: value1
I1206 13:13:23.219] foo.company.com/test patched
I1206 13:13:23.297] crd.sh:239: Successful get foos/test {{.patched}}: value2
I1206 13:13:23.372] foo.company.com/test patched
I1206 13:13:23.462] crd.sh:241: Successful get foos/test {{.patched}}: <no value>
I1206 13:13:23.598] +++ [1206 13:13:23] "kubectl patch --local" returns error as expected for CustomResource: error: cannot apply strategic merge patch for company.com/v1, Kind=Foo locally, try --type merge
I1206 13:13:23.655] {
I1206 13:13:23.655]   "apiVersion": "company.com/v1",
I1206 13:13:23.655]   "kind": "Foo",
I1206 13:13:23.655]   "metadata": {
I1206 13:13:23.655]     "annotations": {
I1206 13:13:23.655]       "kubernetes.io/change-cause": "kubectl patch foos/test --server=http://127.0.0.1:8080 --match-server-version=true --patch={\"patched\":null} --type=merge --record=true"
... skipping 113 lines ...
W1206 13:13:25.066] I1206 13:13:21.560067 52364 controller.go:608] quota admission added evaluator for: foos.company.com
W1206 13:13:25.067] I1206 13:13:24.727324 52364 controller.go:608] quota admission added evaluator for: bars.company.com
W1206 13:13:25.067] /go/src/k8s.io/kubernetes/hack/lib/test.sh: line 264: 71211 Killed while [ ${tries} -lt 10 ]; do
W1206 13:13:25.067] tries=$((tries+1)); kubectl "${kube_flags[@]}" patch bars/test -p "{\"patched\":\"${tries}\"}" --type=merge; sleep 1;
W1206 13:13:25.067] done
W1206 13:13:25.067] /go/src/k8s.io/kubernetes/test/cmd/../../test/cmd/crd.sh: line 295: 71210 Killed kubectl "${kube_flags[@]}" get bars --request-timeout=1m --watch-only -o name
W1206 13:13:41.025] E1206 13:13:41.024139 55730 resource_quota_controller.go:437] failed to sync resource monitors: [couldn't start monitor for resource "extensions/v1beta1, Resource=networkpolicies": unable to monitor quota for resource "extensions/v1beta1, Resource=networkpolicies", couldn't start monitor for resource "company.com/v1, Resource=foos": unable to monitor quota for resource "company.com/v1, Resource=foos", couldn't start monitor for resource "company.com/v1, Resource=validfoos": unable to monitor quota for resource "company.com/v1, Resource=validfoos", couldn't start monitor for resource "company.com/v1, Resource=bars": unable to monitor quota for resource "company.com/v1, Resource=bars", couldn't start monitor for resource "mygroup.example.com/v1alpha1, Resource=resources": unable to monitor quota for resource "mygroup.example.com/v1alpha1, Resource=resources"]
W1206 13:13:41.220] I1206 13:13:41.219386 55730 controller_utils.go:1027] Waiting for caches to sync for garbage collector controller
W1206 13:13:41.221] I1206 13:13:41.220520 52364 clientconn.go:551] parsed scheme: ""
W1206 13:13:41.221] I1206 13:13:41.220554 52364 clientconn.go:557] scheme "" not registered, fallback to default scheme
W1206 13:13:41.221] I1206 13:13:41.220605 52364 resolver_conn_wrapper.go:116] ccResolverWrapper: sending new addresses to cc: [{127.0.0.1:2379 0 <nil>}]
W1206 13:13:41.221] I1206 13:13:41.220653 52364 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W1206 13:13:41.221] I1206 13:13:41.221065 52364 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
... skipping 81 lines ...
I1206 13:13:52.496] +++ [1206 13:13:52] Testing cmd with image
I1206 13:13:52.577] Successful
I1206 13:13:52.578] message:deployment.apps/test1 created
I1206 13:13:52.578] has:deployment.apps/test1 created
I1206 13:13:52.645] deployment.extensions "test1" deleted
I1206 13:13:52.713] Successful
I1206 13:13:52.713] message:error: Invalid image name "InvalidImageName": invalid reference format
I1206 13:13:52.713] has:error: Invalid image name "InvalidImageName": invalid reference format
I1206 13:13:52.725] +++ exit code: 0
I1206 13:13:52.766] Recording: run_recursive_resources_tests
I1206 13:13:52.767] Running command: run_recursive_resources_tests
I1206 13:13:52.783] 
I1206 13:13:52.785] +++ Running case: test-cmd.run_recursive_resources_tests
I1206 13:13:52.787] +++ working dir: /go/src/k8s.io/kubernetes
... skipping 4 lines ...
I1206 13:13:52.924] Context "test" modified.
I1206 13:13:53.008] generic-resources.sh:202: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:13:53.232] generic-resources.sh:206: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:53.234] Successful
I1206 13:13:53.234] message:pod/busybox0 created
I1206 13:13:53.234] pod/busybox1 created
I1206 13:13:53.234] error: error validating "hack/testdata/recursive/pod/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I1206 13:13:53.235] has:error validating data: kind not set
I1206 13:13:53.314] generic-resources.sh:211: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:53.468] generic-resources.sh:219: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: busybox:busybox:
I1206 13:13:53.470] Successful
I1206 13:13:53.470] message:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1206 13:13:53.471] has:Object 'Kind' is missing
I1206 13:13:53.551] generic-resources.sh:226: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:53.782] generic-resources.sh:230: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
I1206 13:13:53.784] Successful
I1206 13:13:53.785] message:pod/busybox0 replaced
I1206 13:13:53.785] pod/busybox1 replaced
I1206 13:13:53.785] error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I1206 13:13:53.785] has:error validating data: kind not set
I1206 13:13:53.864] generic-resources.sh:235: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:53.953] Successful
I1206 13:13:53.953] message:Name: busybox0
I1206 13:13:53.954] Namespace: namespace-1544102032-4883
I1206 13:13:53.954] Priority: 0
I1206 13:13:53.954] PriorityClassName: <none>
... skipping 159 lines ...
I1206 13:13:53.967] has:Object 'Kind' is missing
I1206 13:13:54.040] generic-resources.sh:245: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:54.201] generic-resources.sh:249: Successful get pods {{range.items}}{{.metadata.annotations.annotatekey}}:{{end}}: annotatevalue:annotatevalue:
I1206 13:13:54.203] Successful
I1206 13:13:54.203] message:pod/busybox0 annotated
I1206 13:13:54.203] pod/busybox1 annotated
I1206 13:13:54.203] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1206 13:13:54.203] has:Object 'Kind' is missing
I1206 13:13:54.284] generic-resources.sh:254: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:54.517] generic-resources.sh:258: Successful get pods {{range.items}}{{.metadata.labels.status}}:{{end}}: replaced:replaced:
I1206 13:13:54.519] Successful
I1206 13:13:54.519] message:Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
I1206 13:13:54.519] pod/busybox0 configured
I1206 13:13:54.519] Warning: kubectl apply should be used on resource created by either kubectl create --save-config or kubectl apply
I1206 13:13:54.519] pod/busybox1 configured
I1206 13:13:54.519] error: error validating "hack/testdata/recursive/pod-modify/pod/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
I1206 13:13:54.520] has:error validating data: kind not set
I1206 13:13:54.599] generic-resources.sh:264: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:13:54.730] deployment.extensions/nginx created
I1206 13:13:54.817] generic-resources.sh:268: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx:
I1206 13:13:54.902] generic-resources.sh:269: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I1206 13:13:55.054] generic-resources.sh:273: Successful get deployment nginx {{ .apiVersion }}: extensions/v1beta1
I1206 13:13:55.056] Successful
... skipping 42 lines ...
I1206 13:13:55.129] deployment.extensions "nginx" deleted
I1206 13:13:55.218] generic-resources.sh:280: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:55.370] generic-resources.sh:284: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:55.372] Successful
I1206 13:13:55.372] message:kubectl convert is DEPRECATED and will be removed in a future version.
I1206 13:13:55.372] In order to convert, kubectl apply the object to the cluster, then kubectl get at the desired version.
I1206 13:13:55.372] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1206 13:13:55.373] has:Object 'Kind' is missing
I1206 13:13:55.454] generic-resources.sh:289: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:55.530] Successful
I1206 13:13:55.530] message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1206 13:13:55.531] has:busybox0:busybox1:
I1206 13:13:55.532] Successful
I1206 13:13:55.532] message:busybox0:busybox1:error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1206 13:13:55.533] has:Object 'Kind' is missing
I1206 13:13:55.614] generic-resources.sh:298: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:55.696] pod/busybox0 labeled pod/busybox1 labeled error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1206 13:13:55.780] generic-resources.sh:303: Successful get pods {{range.items}}{{.metadata.labels.mylabel}}:{{end}}: myvalue:myvalue:
I1206 13:13:55.782] Successful
I1206 13:13:55.782] message:pod/busybox0 labeled
I1206 13:13:55.782] pod/busybox1 labeled
I1206 13:13:55.783] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1206 13:13:55.783] has:Object 'Kind' is missing
I1206 13:13:55.865] generic-resources.sh:308: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:55.944] pod/busybox0 patched pod/busybox1 patched error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1206 13:13:56.026] generic-resources.sh:313: Successful get pods {{range.items}}{{(index .spec.containers 0).image}}:{{end}}: prom/busybox:prom/busybox:
I1206 13:13:56.028] Successful
I1206 13:13:56.028] message:pod/busybox0 patched
I1206 13:13:56.028] pod/busybox1 patched
I1206 13:13:56.029] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1206 13:13:56.029] has:Object 'Kind' is missing
I1206 13:13:56.110] generic-resources.sh:318: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:56.273] generic-resources.sh:322: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:13:56.275] Successful
I1206 13:13:56.275] message:warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
I1206 13:13:56.275] pod "busybox0" force deleted
I1206 13:13:56.275] pod "busybox1" force deleted
I1206 13:13:56.275] error: unable to decode "hack/testdata/recursive/pod/pod/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"Pod","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}'
I1206 13:13:56.275] has:Object 'Kind' is missing
I1206 13:13:56.354] generic-resources.sh:327: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:13:56.491] replicationcontroller/busybox0 created
I1206 13:13:56.494] replicationcontroller/busybox1 created
I1206 13:13:56.585] generic-resources.sh:331: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:56.669] generic-resources.sh:336: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:56.748] generic-resources.sh:337: Successful get rc busybox0 {{.spec.replicas}}: 1
I1206 13:13:56.828] generic-resources.sh:338: Successful get rc busybox1 {{.spec.replicas}}: 1
I1206 13:13:56.991] generic-resources.sh:343: Successful get hpa busybox0 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 80
I1206 13:13:57.071] generic-resources.sh:344: Successful get hpa busybox1 {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 1 2 80
I1206 13:13:57.073] Successful
I1206 13:13:57.073] message:horizontalpodautoscaler.autoscaling/busybox0 autoscaled
I1206 13:13:57.073] horizontalpodautoscaler.autoscaling/busybox1 autoscaled
I1206 13:13:57.073] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1206 13:13:57.074] has:Object 'Kind' is missing
I1206 13:13:57.144] horizontalpodautoscaler.autoscaling "busybox0" deleted
I1206 13:13:57.222] horizontalpodautoscaler.autoscaling "busybox1" deleted
I1206 13:13:57.310] generic-resources.sh:352: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:57.392] generic-resources.sh:353: Successful get rc busybox0 {{.spec.replicas}}: 1
I1206 13:13:57.475] generic-resources.sh:354: Successful get rc busybox1 {{.spec.replicas}}: 1
I1206 13:13:57.642] generic-resources.sh:358: Successful get service busybox0 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80
I1206 13:13:57.721] generic-resources.sh:359: Successful get service busybox1 {{(index .spec.ports 0).name}} {{(index .spec.ports 0).port}}: <no value> 80
I1206 13:13:57.723] Successful
I1206 13:13:57.724] message:service/busybox0 exposed
I1206 13:13:57.724] service/busybox1 exposed
I1206 13:13:57.724] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1206 13:13:57.724] has:Object 'Kind' is missing
I1206 13:13:57.808] generic-resources.sh:365: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:57.888] generic-resources.sh:366: Successful get rc busybox0 {{.spec.replicas}}: 1
I1206 13:13:57.967] generic-resources.sh:367: Successful get rc busybox1 {{.spec.replicas}}: 1
I1206 13:13:58.142] generic-resources.sh:371: Successful get rc busybox0 {{.spec.replicas}}: 2
I1206 13:13:58.221] generic-resources.sh:372: Successful get rc busybox1 {{.spec.replicas}}: 2
I1206 13:13:58.223] Successful
I1206 13:13:58.223] message:replicationcontroller/busybox0 scaled
I1206 13:13:58.223] replicationcontroller/busybox1 scaled
I1206 13:13:58.223] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1206 13:13:58.223] has:Object 'Kind' is missing
I1206 13:13:58.303] generic-resources.sh:377: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:13:58.462] generic-resources.sh:381: Successful get pods {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:13:58.464] Successful
I1206 13:13:58.464] message:warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
I1206 13:13:58.464] replicationcontroller "busybox0" force deleted
I1206 13:13:58.464] replicationcontroller "busybox1" force deleted
I1206 13:13:58.464] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1206 13:13:58.465] has:Object 'Kind' is missing
I1206 13:13:58.544] generic-resources.sh:386: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:13:58.677] deployment.extensions/nginx1-deployment created
I1206 13:13:58.680] deployment.extensions/nginx0-deployment created
I1206 13:13:58.771] generic-resources.sh:390: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx0-deployment:nginx1-deployment:
I1206 13:13:58.852] generic-resources.sh:391: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:k8s.gcr.io/nginx:1.7.9:
I1206 13:13:59.034] generic-resources.sh:395: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:k8s.gcr.io/nginx:1.7.9:
I1206 13:13:59.036] Successful
I1206 13:13:59.037] message:deployment.extensions/nginx1-deployment skipped rollback (current template already matches revision 1)
I1206 13:13:59.037] deployment.extensions/nginx0-deployment skipped rollback (current template already matches revision 1)
I1206 13:13:59.037] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I1206 13:13:59.037] has:Object 'Kind' is missing
I1206 13:13:59.115] deployment.extensions/nginx1-deployment paused
I1206 13:13:59.118] deployment.extensions/nginx0-deployment paused
I1206 13:13:59.209] generic-resources.sh:402: Successful get deployment {{range.items}}{{.spec.paused}}:{{end}}: true:true:
I1206 13:13:59.211] Successful
I1206 13:13:59.212] message:unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
... skipping 10 lines ...
I1206 13:13:59.488] 1 <none>
I1206 13:13:59.488] 
I1206 13:13:59.488] deployment.extensions/nginx0-deployment
I1206 13:13:59.488] REVISION CHANGE-CAUSE
I1206 13:13:59.489] 1 <none>
I1206 13:13:59.489] 
I1206 13:13:59.490] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I1206 13:13:59.490] has:nginx0-deployment
I1206 13:13:59.490] Successful
I1206 13:13:59.490] message:deployment.extensions/nginx1-deployment
I1206 13:13:59.490] REVISION CHANGE-CAUSE
I1206 13:13:59.490] 1 <none>
I1206 13:13:59.490] 
I1206 13:13:59.491] deployment.extensions/nginx0-deployment
I1206 13:13:59.491] REVISION CHANGE-CAUSE
I1206 13:13:59.491] 1 <none>
I1206 13:13:59.491] 
I1206 13:13:59.491] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I1206 13:13:59.491] has:nginx1-deployment
I1206 13:13:59.491] Successful
I1206 13:13:59.491] message:deployment.extensions/nginx1-deployment
I1206 13:13:59.492] REVISION CHANGE-CAUSE
I1206 13:13:59.492] 1 <none>
I1206 13:13:59.492] 
I1206 13:13:59.492] deployment.extensions/nginx0-deployment
I1206 13:13:59.492] REVISION CHANGE-CAUSE
I1206 13:13:59.492] 1 <none>
I1206 13:13:59.492] 
I1206 13:13:59.492] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I1206 13:13:59.492] has:Object 'Kind' is missing
I1206 13:13:59.565] deployment.extensions "nginx1-deployment" force deleted
I1206 13:13:59.570] deployment.extensions "nginx0-deployment" force deleted
W1206 13:13:59.671] Error from server (NotFound): namespaces "non-native-resources" not found
W1206 13:13:59.671] kubectl run --generator=deployment/apps.v1 is DEPRECATED and will be removed in a future version. Use kubectl run --generator=run-pod/v1 or kubectl create instead.
W1206 13:13:59.672] I1206 13:13:52.569207 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102032-25286", Name:"test1", UID:"c6fbf2b2-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"982", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set test1-fb488bd5d to 1
W1206 13:13:59.672] I1206 13:13:52.573694 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102032-25286", Name:"test1-fb488bd5d", UID:"c6fc7853-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"983", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test1-fb488bd5d-rh8xf
W1206 13:13:59.672] I1206 13:13:54.733461 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102032-4883", Name:"nginx", UID:"c8461a8c-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1007", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-6f6bb85d9c to 3
W1206 13:13:59.673] I1206 13:13:54.736299 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102032-4883", Name:"nginx-6f6bb85d9c", UID:"c846ad3e-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1008", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-6f6bb85d9c-lhs8b
W1206 13:13:59.673] I1206 13:13:54.738640 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102032-4883", Name:"nginx-6f6bb85d9c", UID:"c846ad3e-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1008", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-6f6bb85d9c-nv27b
W1206 13:13:59.673] I1206 13:13:54.739085 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102032-4883", Name:"nginx-6f6bb85d9c", UID:"c846ad3e-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1008", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-6f6bb85d9c-bd8vc
W1206 13:13:59.673] kubectl convert is DEPRECATED and will be removed in a future version.
W1206 13:13:59.673] In order to convert, kubectl apply the object to the cluster, then kubectl get at the desired version.
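The two deprecation warnings above name their own replacements; roughly what the suggested invocations look like (resource and image names are illustrative):

    # Instead of kubectl run --generator=deployment/apps.v1 ...
    kubectl run pi --generator=run-pod/v1 --image=busybox -- sleep 3600   # bare pod
    kubectl create deployment test1 --image=busybox                      # deployment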
W1206 13:13:59.674] I1206 13:13:56.494418 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102032-4883", Name:"busybox0", UID:"c952d17c-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1038", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox0-4wcj2
W1206 13:13:59.674] error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
W1206 13:13:59.674] I1206 13:13:56.496859 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102032-4883", Name:"busybox1", UID:"c95380be-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1040", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox1-4h22c
W1206 13:13:59.674] I1206 13:13:56.786915 55730 namespace_controller.go:171] Namespace has been deleted non-native-resources
W1206 13:13:59.674] I1206 13:13:58.052045 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102032-4883", Name:"busybox0", UID:"c952d17c-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1060", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox0-94vbk
W1206 13:13:59.675] I1206 13:13:58.059384 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102032-4883", Name:"busybox1", UID:"c95380be-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1064", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox1-p7tmt
W1206 13:13:59.675] error: error validating "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
W1206 13:13:59.675] I1206 13:13:58.680655 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102032-4883", Name:"nginx1-deployment", UID:"caa05916-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1080", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx1-deployment-75f6fc6747 to 2
W1206 13:13:59.675] I1206 13:13:58.683392 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102032-4883", Name:"nginx0-deployment", UID:"caa10040-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1082", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx0-deployment-b6bb4ccbb to 2
W1206 13:13:59.676] I1206 13:13:58.684322 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102032-4883", Name:"nginx1-deployment-75f6fc6747", UID:"caa0ef76-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1081", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx1-deployment-75f6fc6747-jdc6p
W1206 13:13:59.676] I1206 13:13:58.687100 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102032-4883", Name:"nginx1-deployment-75f6fc6747", UID:"caa0ef76-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1081", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx1-deployment-75f6fc6747-c9mrm
W1206 13:13:59.676] I1206 13:13:58.690635 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102032-4883", Name:"nginx0-deployment-b6bb4ccbb", UID:"caa16fd1-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1085", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx0-deployment-b6bb4ccbb-kn8v5
W1206 13:13:59.676] I1206 13:13:58.692812 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102032-4883", Name:"nginx0-deployment-b6bb4ccbb", UID:"caa16fd1-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1085", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx0-deployment-b6bb4ccbb-2pfb4
W1206 13:13:59.677] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W1206 13:13:59.677] error: unable to decode "hack/testdata/recursive/deployment/deployment/nginx-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"extensions/v1beta1","ind":"Deployment","metadata":{"labels":{"app":"nginx2-deployment"},"name":"nginx2-deployment"},"spec":{"replicas":2,"template":{"metadata":{"labels":{"app":"nginx2"}},"spec":{"containers":[{"image":"k8s.gcr.io/nginx:1.7.9","name":"nginx","ports":[{"containerPort":80}]}]}}}}'
I1206 13:14:00.656] generic-resources.sh:424: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:14:00.786] replicationcontroller/busybox0 created
I1206 13:14:00.791] replicationcontroller/busybox1 created
I1206 13:14:00.877] generic-resources.sh:428: Successful get rc {{range.items}}{{.metadata.name}}:{{end}}: busybox0:busybox1:
I1206 13:14:00.961] Successful
I1206 13:14:00.961] message:no rollbacker has been implemented for "ReplicationController"
... skipping 4 lines ...
I1206 13:14:00.963] message:no rollbacker has been implemented for "ReplicationController"
I1206 13:14:00.963] no rollbacker has been implemented for "ReplicationController"
I1206 13:14:00.964] unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1206 13:14:00.964] has:Object 'Kind' is missing
I1206 13:14:01.043] Successful
I1206 13:14:01.044] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1206 13:14:01.044] error: replicationcontrollers "busybox0" pausing is not supported
I1206 13:14:01.044] error: replicationcontrollers "busybox1" pausing is not supported
I1206 13:14:01.044] has:Object 'Kind' is missing
I1206 13:14:01.045] Successful
I1206 13:14:01.046] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1206 13:14:01.046] error: replicationcontrollers "busybox0" pausing is not supported
I1206 13:14:01.046] error: replicationcontrollers "busybox1" pausing is not supported
I1206 13:14:01.046] has:replicationcontrollers "busybox0" pausing is not supported
I1206 13:14:01.047] Successful
I1206 13:14:01.047] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1206 13:14:01.047] error: replicationcontrollers "busybox0" pausing is not supported
I1206 13:14:01.048] error: replicationcontrollers "busybox1" pausing is not supported
I1206 13:14:01.048] has:replicationcontrollers "busybox1" pausing is not supported
I1206 13:14:01.128] Successful
I1206 13:14:01.129] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1206 13:14:01.129] error: replicationcontrollers "busybox0" resuming is not supported
I1206 13:14:01.129] error: replicationcontrollers "busybox1" resuming is not supported
I1206 13:14:01.129] has:Object 'Kind' is missing
I1206 13:14:01.130] Successful
I1206 13:14:01.131] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1206 13:14:01.131] error: replicationcontrollers "busybox0" resuming is not supported
I1206 13:14:01.131] error: replicationcontrollers "busybox1" resuming is not supported
I1206 13:14:01.131] has:replicationcontrollers "busybox0" resuming is not supported
I1206 13:14:01.132] Successful
I1206 13:14:01.133] message:unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1206 13:14:01.133] error: replicationcontrollers "busybox0" resuming is not supported
I1206 13:14:01.133] error: replicationcontrollers "busybox1" resuming is not supported
I1206 13:14:01.133] has:replicationcontrollers "busybox0" resuming is not supported
I1206 13:14:01.202] replicationcontroller "busybox0" force deleted
I1206 13:14:01.206] replicationcontroller "busybox1" force deleted
W1206 13:14:01.307] I1206 13:14:00.789105 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102032-4883", Name:"busybox0", UID:"cbe21f30-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1125", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox0-8w9kj
W1206 13:14:01.307] error: error validating "hack/testdata/recursive/rc/rc/busybox-broken.yaml": error validating data: kind not set; if you choose to ignore these errors, turn validation off with --validate=false
W1206 13:14:01.307] I1206 13:14:00.793365 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102032-4883", Name:"busybox1", UID:"cbe31580-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1129", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: busybox1-tnnn2
W1206 13:14:01.307] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
W1206 13:14:01.308] error: unable to decode "hack/testdata/recursive/rc/rc/busybox-broken.yaml": Object 'Kind' is missing in '{"apiVersion":"v1","ind":"ReplicationController","metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"replicas":1,"selector":{"app":"busybox2"},"template":{"metadata":{"labels":{"app":"busybox2"},"name":"busybox2"},"spec":{"containers":[{"command":["sleep","3600"],"image":"busybox","imagePullPolicy":"IfNotPresent","name":"busybox"}],"restartPolicy":"Always"}}}}'
I1206 13:14:02.224] +++ exit code: 0
I1206 13:14:02.259] Recording: run_namespace_tests
I1206 13:14:02.259] Running command: run_namespace_tests
I1206 13:14:02.276] 
I1206 13:14:02.278] +++ Running case: test-cmd.run_namespace_tests
I1206 13:14:02.280] +++ working dir: /go/src/k8s.io/kubernetes
I1206 13:14:02.282] +++ command: run_namespace_tests
I1206 13:14:02.290] +++ [1206 13:14:02] Testing kubectl(v1:namespaces)
I1206 13:14:02.353] namespace/my-namespace created
I1206 13:14:02.435] core.sh:1295: Successful get namespaces/my-namespace {{.metadata.name}}: my-namespace
I1206 13:14:02.504] namespace "my-namespace" deleted
I1206 13:14:07.605] namespace/my-namespace condition met
I1206 13:14:07.680] Successful
I1206 13:14:07.681] message:Error from server (NotFound): namespaces "my-namespace" not found
I1206 13:14:07.681] has: not found
I1206 13:14:07.780] core.sh:1310: Successful get namespaces {{range.items}}{{ if eq $id_field \"other\" }}found{{end}}{{end}}:: :
I1206 13:14:07.843] namespace/other created
I1206 13:14:07.924] core.sh:1314: Successful get namespaces/other {{.metadata.name}}: other
I1206 13:14:08.003] core.sh:1318: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:14:08.137] pod/valid-pod created
I1206 13:14:08.222] core.sh:1322: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1206 13:14:08.304] core.sh:1324: Successful get pods -n other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1206 13:14:08.376] Successful
I1206 13:14:08.376] message:error: a resource cannot be retrieved by name across all namespaces
I1206 13:14:08.376] has:a resource cannot be retrieved by name across all namespaces
I1206 13:14:08.458] core.sh:1331: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}: valid-pod:
I1206 13:14:08.532] pod "valid-pod" force deleted
I1206 13:14:08.618] core.sh:1335: Successful get pods --namespace=other {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:14:08.687] namespace "other" deleted
W1206 13:14:08.788] warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely.
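The namespace test above reduces to a create/verify/delete/wait loop; the `condition met` line indicates the test blocks on deletion with `kubectl wait`. A sketch of the sequence:

    kubectl create namespace my-namespace
    kubectl get namespaces/my-namespace -o go-template='{{.metadata.name}}'
    kubectl delete namespace my-namespace
    # Namespace deletion is asynchronous; wait until the object is gone:
    kubectl wait --for=delete ns/my-namespace --timeout=60s
    kubectl get namespaces my-namespace   # now: Error from server (NotFound)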
W1206 13:14:11.030] E1206 13:14:11.029742 55730 resource_quota_controller.go:437] failed to sync resource monitors: couldn't start monitor for resource "extensions/v1beta1, Resource=networkpolicies": unable to monitor quota for resource "extensions/v1beta1, Resource=networkpolicies"
W1206 13:14:11.342] I1206 13:14:11.342233 55730 controller_utils.go:1027] Waiting for caches to sync for garbage collector controller
W1206 13:14:11.443] I1206 13:14:11.442628 55730 controller_utils.go:1034] Caches are synced for garbage collector controller
W1206 13:14:11.903] I1206 13:14:11.903174 55730 horizontal.go:309] Horizontal Pod Autoscaler busybox0 has been deleted in namespace-1544102032-4883
W1206 13:14:11.908] I1206 13:14:11.907598 55730 horizontal.go:309] Horizontal Pod Autoscaler busybox1 has been deleted in namespace-1544102032-4883
W1206 13:14:12.602] I1206 13:14:12.601775 55730 namespace_controller.go:171] Namespace has been deleted my-namespace
I1206 13:14:13.823] +++ exit code: 0
... skipping 113 lines ...
I1206 13:14:28.636] +++ command: run_client_config_tests
I1206 13:14:28.645] +++ [1206 13:14:28] Creating namespace namespace-1544102068-14317
I1206 13:14:28.712] namespace/namespace-1544102068-14317 created
I1206 13:14:28.775] Context "test" modified.
I1206 13:14:28.781] +++ [1206 13:14:28] Testing client config
I1206 13:14:28.840] Successful
I1206 13:14:28.840] message:error: stat missing: no such file or directory
I1206 13:14:28.840] has:missing: no such file or directory
I1206 13:14:28.898] Successful
I1206 13:14:28.898] message:error: stat missing: no such file or directory
I1206 13:14:28.898] has:missing: no such file or directory
I1206 13:14:28.959] Successful
I1206 13:14:28.959] message:error: stat missing: no such file or directory
I1206 13:14:28.959] has:missing: no such file or directory
I1206 13:14:29.021] Successful
I1206 13:14:29.021] message:Error in configuration: context was not found for specified context: missing-context
I1206 13:14:29.021] has:context was not found for specified context: missing-context
I1206 13:14:29.080] Successful
I1206 13:14:29.080] message:error: no server found for cluster "missing-cluster"
I1206 13:14:29.080] has:no server found for cluster "missing-cluster"
I1206 13:14:29.141] Successful
I1206 13:14:29.142] message:error: auth info "missing-user" does not exist
I1206 13:14:29.142] has:auth info "missing-user" does not exist
I1206 13:14:29.260] Successful
I1206 13:14:29.261] message:error: Error loading config file "/tmp/newconfig.yaml": no kind "Config" is registered for version "v-1" in scheme "k8s.io/client-go/tools/clientcmd/api/latest/latest.go:50"
I1206 13:14:29.261] has:Error loading config file
I1206 13:14:29.320] Successful
I1206 13:14:29.320] message:error: stat missing-config: no such file or directory
I1206 13:14:29.320] has:no such file or directory
I1206 13:14:29.333] +++ exit code: 0
I1206 13:14:29.395] Recording: run_service_accounts_tests
I1206 13:14:29.395] Running command: run_service_accounts_tests
I1206 13:14:29.412] 
I1206 13:14:29.414] +++ Running case: test-cmd.run_service_accounts_tests
... skipping 76 lines ...
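Each client-config case above forces one kubeconfig lookup to fail; roughly the invocations being checked (the file and entry names come from the error messages):

    kubectl get pods --kubeconfig=missing        # stat missing: no such file or directory
    kubectl get pods --context=missing-context   # context was not found for specified context
    kubectl get pods --cluster=missing-cluster   # no server found for cluster "missing-cluster"
    kubectl get pods --user=missing-user         # auth info "missing-user" does not exist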
I1206 13:14:36.424] job-name=test-job
I1206 13:14:36.424] run=pi
I1206 13:14:36.424] Annotations: cronjob.kubernetes.io/instantiate: manual
I1206 13:14:36.424] Parallelism: 1
I1206 13:14:36.424] Completions: 1
I1206 13:14:36.424] Start Time: Thu, 06 Dec 2018 13:14:36 +0000
I1206 13:14:36.424] Pods Statuses: 1 Running / 0 Succeeded / 0 Failed
I1206 13:14:36.425] Pod Template:
I1206 13:14:36.425] Labels: controller-uid=e0fbc1dd-f958-11e8-9847-0242ac110002
I1206 13:14:36.425] job-name=test-job
I1206 13:14:36.425] run=pi
I1206 13:14:36.425] Containers:
I1206 13:14:36.425] pi:
... skipping 329 lines ...
I1206 13:14:45.536]   selector:
I1206 13:14:45.536]     role: padawan
I1206 13:14:45.536]   sessionAffinity: None
I1206 13:14:45.536]   type: ClusterIP
I1206 13:14:45.536] status:
I1206 13:14:45.536]   loadBalancer: {}
W1206 13:14:45.637] error: you must specify resources by --filename when --local is set.
W1206 13:14:45.637] Example resource specifications include:
W1206 13:14:45.637] '-f rsrc.yaml'
W1206 13:14:45.637] '--filename=rsrc.json'
I1206 13:14:45.738] core.sh:886: Successful get services redis-master {{range.spec.selector}}{{.}}:{{end}}: redis:master:backend:
I1206 13:14:45.834] core.sh:893: Successful get services {{range.items}}{{.metadata.name}}:{{end}}: kubernetes:redis-master:
I1206 13:14:45.907] service "redis-master" deleted
... skipping 93 lines ...
I1206 13:14:51.150] apps.sh:80: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I1206 13:14:51.229] apps.sh:81: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
I1206 13:14:51.325] daemonset.extensions/bind rolled back
I1206 13:14:51.409] apps.sh:84: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
I1206 13:14:51.490] apps.sh:85: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I1206 13:14:51.582] Successful
I1206 13:14:51.582] message:error: unable to find specified revision 1000000 in history
I1206 13:14:51.583] has:unable to find specified revision
I1206 13:14:51.664] apps.sh:89: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:2.0:
I1206 13:14:51.747] apps.sh:90: Successful get daemonset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I1206 13:14:51.847] daemonset.extensions/bind rolled back
I1206 13:14:51.934] apps.sh:93: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/pause:latest:
I1206 13:14:52.012] apps.sh:94: Successful get daemonset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
... skipping 22 lines ...
I1206 13:14:53.184] Namespace: namespace-1544102092-11297
I1206 13:14:53.184] Selector: app=guestbook,tier=frontend
I1206 13:14:53.184] Labels: app=guestbook
I1206 13:14:53.184] tier=frontend
I1206 13:14:53.184] Annotations: <none>
I1206 13:14:53.184] Replicas: 3 current / 3 desired
I1206 13:14:53.185] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:14:53.185] Pod Template:
I1206 13:14:53.185] Labels: app=guestbook
I1206 13:14:53.185] tier=frontend
I1206 13:14:53.185] Containers:
I1206 13:14:53.185] php-redis:
I1206 13:14:53.185] Image: gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
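The daemonset steps above exercise `kubectl rollout undo`, including its failure mode for a revision that was never recorded; a sketch of the invocations:

    kubectl rollout undo daemonset/bind                  # back to the previous revision
    kubectl rollout undo daemonset/bind --to-revision=1  # back to an explicit revision
    # A revision missing from history is rejected:
    kubectl rollout undo daemonset/bind --to-revision=1000000
    # -> error: unable to find specified revision 1000000 in history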
I1206 13:14:53.284] Namespace: namespace-1544102092-11297
I1206 13:14:53.285] Selector: app=guestbook,tier=frontend
I1206 13:14:53.285] Labels: app=guestbook
I1206 13:14:53.285] tier=frontend
I1206 13:14:53.285] Annotations: <none>
I1206 13:14:53.285] Replicas: 3 current / 3 desired
I1206 13:14:53.285] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:14:53.285] Pod Template:
I1206 13:14:53.285] Labels: app=guestbook
I1206 13:14:53.285] tier=frontend
I1206 13:14:53.285] Containers:
I1206 13:14:53.285] php-redis:
I1206 13:14:53.286] Image: gcr.io/google_samples/gb-frontend:v4
... skipping 18 lines ...
I1206 13:14:53.381] Namespace: namespace-1544102092-11297
I1206 13:14:53.381] Selector: app=guestbook,tier=frontend
I1206 13:14:53.381] Labels: app=guestbook
I1206 13:14:53.381] tier=frontend
I1206 13:14:53.381] Annotations: <none>
I1206 13:14:53.382] Replicas: 3 current / 3 desired
I1206 13:14:53.382] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:14:53.382] Pod Template:
I1206 13:14:53.382] Labels: app=guestbook
I1206 13:14:53.382] tier=frontend
I1206 13:14:53.382] Containers:
I1206 13:14:53.382] php-redis:
I1206 13:14:53.382] Image: gcr.io/google_samples/gb-frontend:v4
... skipping 12 lines ...
I1206 13:14:53.479] Namespace: namespace-1544102092-11297
I1206 13:14:53.479] Selector: app=guestbook,tier=frontend
I1206 13:14:53.479] Labels: app=guestbook
I1206 13:14:53.480] tier=frontend
I1206 13:14:53.480] Annotations: <none>
I1206 13:14:53.480] Replicas: 3 current / 3 desired
I1206 13:14:53.480] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:14:53.480] Pod Template:
I1206 13:14:53.480] Labels: app=guestbook
I1206 13:14:53.480] tier=frontend
I1206 13:14:53.480] Containers:
I1206 13:14:53.480] php-redis:
I1206 13:14:53.480] Image: gcr.io/google_samples/gb-frontend:v4
... skipping 10 lines ...
I1206 13:14:53.481] Type    Reason            Age  From                    Message
I1206 13:14:53.481] ----    ------            ---- ----                    -------
I1206 13:14:53.481] Normal  SuccessfulCreate  1s   replication-controller  Created pod: frontend-4zdk6
I1206 13:14:53.482] Normal  SuccessfulCreate  1s   replication-controller  Created pod: frontend-ndnvp
I1206 13:14:53.482] Normal  SuccessfulCreate  1s   replication-controller  Created pod: frontend-6fmwf
I1206 13:14:53.482] 
W1206 13:14:53.585] E1206 13:14:51.854343 55730 daemon_controller.go:303] namespace-1544102089-30614/bind failed with : error storing status for daemon set &v1.DaemonSet{TypeMeta:v1.TypeMeta{Kind:"", APIVersion:""}, ObjectMeta:v1.ObjectMeta{Name:"bind", GenerateName:"", Namespace:"namespace-1544102089-30614", SelfLink:"/apis/apps/v1/namespaces/namespace-1544102089-30614/daemonsets/bind", UID:"e9420f46-f958-11e8-9847-0242ac110002", ResourceVersion:"1343", Generation:4, CreationTimestamp:v1.Time{Time:time.Time{wall:0x0, ext:63679698890, loc:(*time.Location)(0x66fa920)}}, DeletionTimestamp:(*v1.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"service":"bind"}, Annotations:map[string]string{"deprecated.daemonset.template.generation":"4", "kubectl.kubernetes.io/last-applied-configuration":"{\"apiVersion\":\"extensions/v1beta1\",\"kind\":\"DaemonSet\",\"metadata\":{\"annotations\":{\"kubernetes.io/change-cause\":\"kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true\"},\"name\":\"bind\",\"namespace\":\"namespace-1544102089-30614\"},\"spec\":{\"template\":{\"metadata\":{\"labels\":{\"service\":\"bind\"}},\"spec\":{\"affinity\":{\"podAntiAffinity\":{\"requiredDuringSchedulingIgnoredDuringExecution\":[{\"labelSelector\":{\"matchExpressions\":[{\"key\":\"service\",\"operator\":\"In\",\"values\":[\"bind\"]}]},\"namespaces\":[],\"topologyKey\":\"kubernetes.io/hostname\"}]}},\"containers\":[{\"image\":\"k8s.gcr.io/pause:latest\",\"name\":\"kubernetes-pause\"},{\"image\":\"k8s.gcr.io/nginx:test-cmd\",\"name\":\"app\"}]}},\"updateStrategy\":{\"rollingUpdate\":{\"maxUnavailable\":\"10%\"},\"type\":\"RollingUpdate\"}}}\n", "kubernetes.io/change-cause":"kubectl apply --filename=hack/testdata/rollingupdate-daemonset-rv2.yaml --record=true --server=http://127.0.0.1:8080 --match-server-version=true"}, OwnerReferences:[]v1.OwnerReference(nil), Initializers:(*v1.Initializers)(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:v1.DaemonSetSpec{Selector:(*v1.LabelSelector)(0xc00325ae00), Template:v1.PodTemplateSpec{ObjectMeta:v1.ObjectMeta{Name:"", GenerateName:"", Namespace:"", SelfLink:"", UID:"", ResourceVersion:"", Generation:0, CreationTimestamp:v1.Time{Time:time.Time{wall:0x0, ext:0, loc:(*time.Location)(nil)}}, DeletionTimestamp:(*v1.Time)(nil), DeletionGracePeriodSeconds:(*int64)(nil), Labels:map[string]string{"service":"bind"}, Annotations:map[string]string(nil), OwnerReferences:[]v1.OwnerReference(nil), Initializers:(*v1.Initializers)(nil), Finalizers:[]string(nil), ClusterName:""}, Spec:v1.PodSpec{Volumes:[]v1.Volume(nil), InitContainers:[]v1.Container(nil), Containers:[]v1.Container{v1.Container{Name:"kubernetes-pause", Image:"k8s.gcr.io/pause:latest", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]v1.ContainerPort(nil), EnvFrom:[]v1.EnvFromSource(nil), Env:[]v1.EnvVar(nil), Resources:v1.ResourceRequirements{Limits:v1.ResourceList(nil), Requests:v1.ResourceList(nil)}, VolumeMounts:[]v1.VolumeMount(nil), VolumeDevices:[]v1.VolumeDevice(nil), LivenessProbe:(*v1.Probe)(nil), ReadinessProbe:(*v1.Probe)(nil), Lifecycle:(*v1.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", TerminationMessagePolicy:"File", ImagePullPolicy:"IfNotPresent", SecurityContext:(*v1.SecurityContext)(nil), Stdin:false, StdinOnce:false, TTY:false}, v1.Container{Name:"app", Image:"k8s.gcr.io/nginx:test-cmd", Command:[]string(nil), Args:[]string(nil), WorkingDir:"", Ports:[]v1.ContainerPort(nil), EnvFrom:[]v1.EnvFromSource(nil), Env:[]v1.EnvVar(nil), Resources:v1.ResourceRequirements{Limits:v1.ResourceList(nil), Requests:v1.ResourceList(nil)}, VolumeMounts:[]v1.VolumeMount(nil), VolumeDevices:[]v1.VolumeDevice(nil), LivenessProbe:(*v1.Probe)(nil), ReadinessProbe:(*v1.Probe)(nil), Lifecycle:(*v1.Lifecycle)(nil), TerminationMessagePath:"/dev/termination-log", TerminationMessagePolicy:"File", ImagePullPolicy:"IfNotPresent", SecurityContext:(*v1.SecurityContext)(nil), Stdin:false, StdinOnce:false, TTY:false}}, RestartPolicy:"Always", TerminationGracePeriodSeconds:(*int64)(0xc0033a6308), ActiveDeadlineSeconds:(*int64)(nil), DNSPolicy:"ClusterFirst", NodeSelector:map[string]string(nil), ServiceAccountName:"", DeprecatedServiceAccount:"", AutomountServiceAccountToken:(*bool)(nil), NodeName:"", HostNetwork:false, HostPID:false, HostIPC:false, ShareProcessNamespace:(*bool)(nil), SecurityContext:(*v1.PodSecurityContext)(0xc00347a9c0), ImagePullSecrets:[]v1.LocalObjectReference(nil), Hostname:"", Subdomain:"", Affinity:(*v1.Affinity)(0xc00325ae60), SchedulerName:"default-scheduler", Tolerations:[]v1.Toleration(nil), HostAliases:[]v1.HostAlias(nil), PriorityClassName:"", Priority:(*int32)(nil), DNSConfig:(*v1.PodDNSConfig)(nil), ReadinessGates:[]v1.PodReadinessGate(nil), RuntimeClassName:(*string)(nil), EnableServiceLinks:(*bool)(nil)}}, UpdateStrategy:v1.DaemonSetUpdateStrategy{Type:"RollingUpdate", RollingUpdate:(*v1.RollingUpdateDaemonSet)(0xc0000d9648)}, MinReadySeconds:0, RevisionHistoryLimit:(*int32)(0xc0033a63c0)}, Status:v1.DaemonSetStatus{CurrentNumberScheduled:0, NumberMisscheduled:0, DesiredNumberScheduled:0, NumberReady:0, ObservedGeneration:3, UpdatedNumberScheduled:0, NumberAvailable:0, NumberUnavailable:0, CollisionCount:(*int32)(nil), Conditions:[]v1.DaemonSetCondition(nil)}}: Operation cannot be fulfilled on daemonsets.apps "bind": the object has been modified; please apply your changes to the latest version and try again
W1206 13:14:53.585] I1206 13:14:52.601289 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102092-11297", Name:"frontend", UID:"eac3e1ff-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1352", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-nsw6v
W1206 13:14:53.586] I1206 13:14:52.602995 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102092-11297", Name:"frontend", UID:"eac3e1ff-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1352", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-r4bmn
W1206 13:14:53.586] I1206 13:14:52.603799 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102092-11297", Name:"frontend", UID:"eac3e1ff-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1352", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-6dl6p
W1206 13:14:53.586] I1206 13:14:52.976484 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102092-11297", Name:"frontend", UID:"eafd61d5-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1368", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-4zdk6
W1206 13:14:53.586] I1206 13:14:52.978553 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102092-11297", Name:"frontend", UID:"eafd61d5-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1368", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-ndnvp
W1206 13:14:53.587] I1206 13:14:52.978850 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102092-11297", Name:"frontend", UID:"eafd61d5-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1368", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-6fmwf
... skipping 2 lines ...
I1206 13:14:53.687] Namespace: namespace-1544102092-11297
I1206 13:14:53.688] Selector: app=guestbook,tier=frontend
I1206 13:14:53.688] Labels: app=guestbook
I1206 13:14:53.688] tier=frontend
I1206 13:14:53.688] Annotations: <none>
I1206 13:14:53.688] Replicas: 3 current / 3 desired
I1206 13:14:53.688] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:14:53.688] Pod Template:
I1206 13:14:53.688] Labels: app=guestbook
I1206 13:14:53.688] tier=frontend
I1206 13:14:53.688] Containers:
I1206 13:14:53.688] php-redis:
I1206 13:14:53.689] Image: gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
I1206 13:14:53.700] Namespace: namespace-1544102092-11297
I1206 13:14:53.700] Selector: app=guestbook,tier=frontend
I1206 13:14:53.700] Labels: app=guestbook
I1206 13:14:53.700] tier=frontend
I1206 13:14:53.700] Annotations: <none>
I1206 13:14:53.700] Replicas: 3 current / 3 desired
I1206 13:14:53.700] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:14:53.700] Pod Template:
I1206 13:14:53.701] Labels: app=guestbook
I1206 13:14:53.701] tier=frontend
I1206 13:14:53.701] Containers:
I1206 13:14:53.701] php-redis:
I1206 13:14:53.701] Image: gcr.io/google_samples/gb-frontend:v4
... skipping 17 lines ...
I1206 13:14:53.793] Namespace: namespace-1544102092-11297
I1206 13:14:53.793] Selector: app=guestbook,tier=frontend
I1206 13:14:53.793] Labels: app=guestbook
I1206 13:14:53.793] tier=frontend
I1206 13:14:53.793] Annotations: <none>
I1206 13:14:53.793] Replicas: 3 current / 3 desired
I1206 13:14:53.794] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:14:53.794] Pod Template:
I1206 13:14:53.794] Labels: app=guestbook
I1206 13:14:53.794] tier=frontend
I1206 13:14:53.794] Containers:
I1206 13:14:53.794] php-redis:
I1206 13:14:53.794] Image: gcr.io/google_samples/gb-frontend:v4
... skipping 11 lines ...
I1206 13:14:53.888] Namespace: namespace-1544102092-11297
I1206 13:14:53.888] Selector: app=guestbook,tier=frontend
I1206 13:14:53.888] Labels: app=guestbook
I1206 13:14:53.888] tier=frontend
I1206 13:14:53.888] Annotations: <none>
I1206 13:14:53.888] Replicas: 3 current / 3 desired
I1206 13:14:53.888] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:14:53.888] Pod Template:
I1206 13:14:53.889] Labels: app=guestbook
I1206 13:14:53.889] tier=frontend
I1206 13:14:53.889] Containers:
I1206 13:14:53.889] php-redis:
I1206 13:14:53.889] Image: gcr.io/google_samples/gb-frontend:v4
... skipping 22 lines ...
I1206 13:14:54.631] core.sh:1061: Successful get rc frontend {{.spec.replicas}}: 3
I1206 13:14:54.711] core.sh:1065: Successful get rc frontend {{.spec.replicas}}: 3
I1206 13:14:54.791] replicationcontroller/frontend scaled
I1206 13:14:54.875] core.sh:1069: Successful get rc frontend {{.spec.replicas}}: 2
I1206 13:14:54.947] replicationcontroller "frontend" deleted
W1206 13:14:55.048] I1206 13:14:54.058895 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102092-11297", Name:"frontend", UID:"eafd61d5-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1378", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: frontend-4zdk6
W1206 13:14:55.048] error: Expected replicas to be 3, was 2
W1206 13:14:55.048] I1206 13:14:54.548999 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102092-11297", Name:"frontend", UID:"eafd61d5-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1384", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-prchq
W1206 13:14:55.048] I1206 13:14:54.795650 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102092-11297", Name:"frontend", UID:"eafd61d5-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1389", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: frontend-prchq
W1206 13:14:55.087] I1206 13:14:55.086753 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102092-11297", Name:"redis-master", UID:"ec3f45a0-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1400", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: redis-master-7dpx4
I1206 13:14:55.188] replicationcontroller/redis-master created
I1206 13:14:55.225] replicationcontroller/redis-slave created
I1206 13:14:55.316] replicationcontroller/redis-master scaled
... skipping 36 lines ...
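The scale steps above, including the failed precondition (`Expected replicas to be 3, was 2`), correspond to invocations of this shape:

    kubectl scale rc frontend --replicas=2
    # --current-replicas is a precondition; it fails once the RC is already at 2:
    kubectl scale rc frontend --current-replicas=3 --replicas=2
    # -> error: Expected replicas to be 3, was 2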
W1206 13:14:56.732] I1206 13:14:56.169037 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-659fc6fb", UID:"ecd6ca4b-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1470", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-659fc6fb-5xwkx
I1206 13:14:56.832] Successful
I1206 13:14:56.832] message:service/expose-test-deployment exposed
I1206 13:14:56.833] has:service/expose-test-deployment exposed
I1206 13:14:56.833] service "expose-test-deployment" deleted
I1206 13:14:56.885] Successful
I1206 13:14:56.886] message:error: couldn't retrieve selectors via --selector flag or introspection: invalid deployment: no selectors, therefore cannot be exposed
I1206 13:14:56.886] See 'kubectl expose -h' for help and examples
I1206 13:14:56.886] has:invalid deployment: no selectors
I1206 13:14:56.961] Successful
I1206 13:14:56.961] message:error: couldn't retrieve selectors via --selector flag or introspection: invalid deployment: no selectors, therefore cannot be exposed
I1206 13:14:56.961] See 'kubectl expose -h' for help and examples
I1206 13:14:56.961] has:invalid deployment: no selectors
I1206 13:14:57.092] deployment.extensions/nginx-deployment created
I1206 13:14:57.176] core.sh:1133: Successful get deployment nginx-deployment {{.spec.replicas}}: 3
I1206 13:14:57.255] service/nginx-deployment exposed
I1206 13:14:57.336] core.sh:1137: Successful get service nginx-deployment {{(index .spec.ports 0).port}}: 80
... skipping 23 lines ...
I1206 13:14:58.726] service "frontend" deleted
I1206 13:14:58.732] service "frontend-2" deleted
I1206 13:14:58.738] service "frontend-3" deleted
I1206 13:14:58.744] service "frontend-4" deleted
I1206 13:14:58.749] service "frontend-5" deleted
I1206 13:14:58.836] Successful
I1206 13:14:58.836] message:error: cannot expose a Node
I1206 13:14:58.836] has:cannot expose
I1206 13:14:58.920] Successful
I1206 13:14:58.920] message:The Service "invalid-large-service-name-that-has-more-than-sixty-three-characters" is invalid: metadata.name: Invalid value: "invalid-large-service-name-that-has-more-than-sixty-three-characters": must be no more than 63 characters
I1206 13:14:58.920] has:metadata.name: Invalid value
I1206 13:14:59.004] Successful
I1206 13:14:59.004] message:service/kubernetes-serve-hostname-testing-sixty-three-characters-in-len exposed
... skipping 30 lines ...
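The expose cases cover the happy path plus the three rejections seen above; sketched (resource names are the ones in the log, except the selector-less deployment, which is illustrative):

    kubectl expose deployment nginx-deployment --port=80   # service/nginx-deployment exposed
    kubectl expose deployment no-selector-deployment       # no selectors, cannot be exposed
    kubectl expose nodes 127.0.0.1                         # -> error: cannot expose a Node
    # Service names are DNS labels, capped at 63 characters:
    kubectl expose deployment nginx-deployment --port=80 \
      --name=invalid-large-service-name-that-has-more-than-sixty-three-characters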
I1206 13:15:00.946] horizontalpodautoscaler.autoscaling/frontend autoscaled
I1206 13:15:01.029] core.sh:1237: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 2 3 80
I1206 13:15:01.101] horizontalpodautoscaler.autoscaling "frontend" deleted
W1206 13:15:01.202] I1206 13:15:00.546457 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102092-11297", Name:"frontend", UID:"ef806e0a-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1625", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-dpjgg
W1206 13:15:01.202] I1206 13:15:00.548637 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102092-11297", Name:"frontend", UID:"ef806e0a-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1625", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-kckmp
W1206 13:15:01.203] I1206 13:15:00.548862 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102092-11297", Name:"frontend", UID:"ef806e0a-f958-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"1625", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-lfmbl
W1206 13:15:01.203] Error: required flag(s) "max" not set
W1206 13:15:01.203] 
W1206 13:15:01.203] 
W1206 13:15:01.203] Examples:
W1206 13:15:01.203]   # Auto scale a deployment "foo", with the number of pods between 2 and 10, no target CPU utilization specified so a default autoscaling policy will be used:
W1206 13:15:01.204]   kubectl autoscale deployment foo --min=2 --max=10
W1206 13:15:01.204] 
... skipping 54 lines ...
I1206 13:15:01.388]   limits:
I1206 13:15:01.388]     cpu: 300m
I1206 13:15:01.388]   requests:
I1206 13:15:01.388]     cpu: 300m
I1206 13:15:01.388] terminationGracePeriodSeconds: 0
I1206 13:15:01.389] status: {}
W1206 13:15:01.489] Error from server (NotFound): deployments.extensions "nginx-deployment-resources" not found
I1206 13:15:01.596] deployment.extensions/nginx-deployment-resources created
I1206 13:15:01.686] core.sh:1252: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx-deployment-resources:
I1206 13:15:01.767] core.sh:1253: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I1206 13:15:01.849] core.sh:1254: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/perl:
I1206 13:15:01.931] deployment.extensions/nginx-deployment-resources resource requirements updated
I1206 13:15:02.018] core.sh:1257: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).resources.limits.cpu}}:{{end}}: 100m:
... skipping 81 lines ...
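The HPA created above (min 2, max 3, target 80%) and the required-flag failure map to commands like:

    kubectl autoscale rc frontend --min=2 --max=3 --cpu-percent=80
    # --max is mandatory; omitting it aborts before contacting the server:
    kubectl autoscale rc frontend --min=2
    # -> Error: required flag(s) "max" not set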
W1206 13:15:02.937] I1206 13:15:01.605534 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources-69c96fd869", UID:"f0219dad-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1646", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-69c96fd869-2lvrf
W1206 13:15:02.938] I1206 13:15:01.934158 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources", UID:"f0210482-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1659", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-resources-6c5996c457 to 1
W1206 13:15:02.938] I1206 13:15:01.937040 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources-6c5996c457", UID:"f054bfa8-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1660", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-6c5996c457-689dh
W1206 13:15:02.938] I1206 13:15:01.939711 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources", UID:"f0210482-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1659", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-resources-69c96fd869 to 2
W1206 13:15:02.938] I1206 13:15:01.943578 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources-69c96fd869", UID:"f0219dad-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1666", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-resources-69c96fd869-2lvrf
W1206 13:15:02.939] I1206 13:15:01.944410 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources", UID:"f0210482-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1662", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-resources-6c5996c457 to 2
W1206 13:15:02.939] E1206 13:15:01.945480 55730 replica_set.go:450] Sync "namespace-1544102092-11297/nginx-deployment-resources-6c5996c457" failed with Operation cannot be fulfilled on replicasets.apps "nginx-deployment-resources-6c5996c457": the object has been modified; please apply your changes to the latest version and try again
W1206 13:15:02.939] I1206 13:15:01.947884 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources-6c5996c457", UID:"f054bfa8-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1671", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-6c5996c457-mgzmq
W1206 13:15:02.939] error: unable to find container named redis
W1206 13:15:02.940] I1206 13:15:02.266887 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources", UID:"f0210482-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1685", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-resources-69c96fd869 to 0
W1206 13:15:02.940] I1206 13:15:02.272385 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources-69c96fd869", UID:"f0219dad-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1689", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-resources-69c96fd869-plzpg
W1206 13:15:02.940] I1206 13:15:02.273808 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources-69c96fd869", UID:"f0219dad-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1689", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-resources-69c96fd869-qnl45
W1206 13:15:02.940] I1206 13:15:02.272607 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources", UID:"f0210482-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1687", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-resources-5f4579485f to 2
W1206 13:15:02.941] I1206 13:15:02.275148 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources-5f4579485f", UID:"f086b076-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1694", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-5f4579485f-r29d7
W1206 13:15:02.941] I1206 13:15:02.277281 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources-5f4579485f", UID:"f086b076-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1694", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-5f4579485f-z27s6
W1206 13:15:02.941] I1206 13:15:02.512915 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources", UID:"f0210482-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1707", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-resources-6c5996c457 to 0
W1206 13:15:02.942] I1206 13:15:02.517629 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources", UID:"f0210482-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1709", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-resources-ff8d89cb6 to 2
W1206 13:15:02.942] I1206 13:15:02.706875 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources-6c5996c457", UID:"f054bfa8-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1710", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-resources-6c5996c457-689dh
W1206 13:15:02.942] I1206 13:15:02.755292 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources-6c5996c457", UID:"f054bfa8-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1710", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-resources-6c5996c457-mgzmq
W1206 13:15:02.942] error: you must specify resources by --filename when --local is set.
W1206 13:15:02.942] Example resource specifications include:
W1206 13:15:02.942] '-f rsrc.yaml'
W1206 13:15:02.942] '--filename=rsrc.json'
I1206 13:15:03.043] core.sh:1273: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).resources.limits.cpu}}:{{end}}: 200m:
I1206 13:15:03.056] core.sh:1274: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.limits.cpu}}:{{end}}: 300m:
I1206 13:15:03.137] core.sh:1275: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 1).resources.requests.cpu}}:{{end}}: 300m:
... skipping 15 lines ...
I1206 13:15:03.664] message:10
I1206 13:15:03.664] has not:2
I1206 13:15:03.748] Successful
I1206 13:15:03.749] message:extensions/v1beta1
I1206 13:15:03.749] has:extensions/v1beta1
W1206 13:15:03.850] I1206 13:15:03.253322 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources-ff8d89cb6", UID:"f0ac43bf-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1712", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-ff8d89cb6-wkf45
W1206 13:15:03.850] E1206 13:15:03.302054 55730 replica_set.go:450] Sync "namespace-1544102092-11297/nginx-deployment-resources-6c5996c457" failed with replicasets.apps "nginx-deployment-resources-6c5996c457" not found
W1206 13:15:03.851] E1206 13:15:03.352482 55730 replica_set.go:450] Sync "namespace-1544102092-11297/nginx-deployment-resources-5f4579485f" failed with Operation cannot be fulfilled on replicasets.apps "nginx-deployment-resources-5f4579485f": StorageError: invalid object, Code: 4, Key: /registry/replicasets/namespace-1544102092-11297/nginx-deployment-resources-5f4579485f, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: f086b076-f958-11e8-9847-0242ac110002, UID in object meta:
W1206 13:15:03.851] I1206 13:15:03.453481 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102092-11297", Name:"nginx-deployment-resources-ff8d89cb6", UID:"f0ac43bf-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1712", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-resources-ff8d89cb6-r95rf
W1206 13:15:03.851] I1206 13:15:03.510596 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102103-31446", Name:"test-nginx-extensions", UID:"f144ba04-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1746", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set test-nginx-extensions-5b89c6c69f to 1
W1206 13:15:03.852] E1206 13:15:03.602051 55730 replica_set.go:450] Sync "namespace-1544102092-11297/nginx-deployment-resources-ff8d89cb6" failed with replicasets.apps "nginx-deployment-resources-ff8d89cb6" not found
W1206 13:15:03.852] I1206 13:15:03.655289 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"test-nginx-extensions-5b89c6c69f", UID:"f1453aea-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1747", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: test-nginx-extensions-5b89c6c69f-lrgrq
I1206 13:15:03.953] Successful
I1206 13:15:03.953] message:apps/v1
I1206 13:15:03.953] has:apps/v1
I1206 13:15:03.966] deployment.extensions "test-nginx-extensions" deleted
I1206 13:15:04.084] deployment.apps/test-nginx-apps created
... skipping 17 lines ...
I1206 13:15:04.729] pod-template-hash=55c9b846cc
I1206 13:15:04.729] Annotations: deployment.kubernetes.io/desired-replicas: 1
I1206 13:15:04.729] deployment.kubernetes.io/max-replicas: 2
I1206 13:15:04.729] deployment.kubernetes.io/revision: 1
I1206 13:15:04.730] Controlled By: Deployment/test-nginx-apps
I1206 13:15:04.730] Replicas: 1 current / 1 desired
I1206 13:15:04.730] Pods Status: 0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I1206 13:15:04.730] Pod Template:
I1206 13:15:04.730] Labels: app=test-nginx-apps
I1206 13:15:04.730] pod-template-hash=55c9b846cc
I1206 13:15:04.730] Containers:
I1206 13:15:04.731] nginx:
I1206 13:15:04.731] Image: k8s.gcr.io/nginx:test-cmd
... skipping 95 lines ...
I1206 13:15:09.345]  Image: k8s.gcr.io/nginx:test-cmd
I1206 13:15:09.427] apps.sh:296: Successful get deployment.extensions {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
I1206 13:15:09.523] deployment.extensions/nginx rolled back
I1206 13:15:10.624] apps.sh:300: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I1206 13:15:10.805] apps.sh:303: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:test-cmd:
I1206 13:15:10.903] deployment.extensions/nginx rolled back
W1206 13:15:11.003] error: unable to find specified revision 1000000 in history
I1206 13:15:11.991] apps.sh:307: Successful get deployment {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx:1.7.9:
I1206 13:15:12.072] deployment.extensions/nginx paused
W1206 13:15:12.173] error: you cannot rollback a paused deployment; resume it first with 'kubectl rollout resume deployment/nginx' and try again
I1206 13:15:12.274] deployment.extensions/nginx resumed
I1206 13:15:12.356] deployment.extensions/nginx rolled back
I1206 13:15:12.525] deployment.kubernetes.io/revision-history: 1,3
W1206 13:15:12.698] error: desired revision (3) is different from the running revision (5)
I1206 13:15:12.836] deployment.extensions/nginx2 created
I1206 13:15:12.915] deployment.extensions "nginx2" deleted
I1206 13:15:12.994] deployment.extensions "nginx" deleted
I1206 13:15:13.080] apps.sh:329: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:15:13.206] deployment.extensions/nginx-deployment created
I1206 13:15:13.292] apps.sh:332: Successful get deployment {{range.items}}{{.metadata.name}}:{{end}}: nginx-deployment:
... skipping 26 lines ...
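The `set resources` flow above updates container limits in place and hits two failure modes: a container name that does not exist (`unable to find container named redis`) and `--local` without `-f`. A sketch (container names taken from the images in the log; the file name is illustrative):

    kubectl set resources deployment nginx-deployment-resources --limits=cpu=100m
    kubectl set resources deployment nginx-deployment-resources -c=perl --limits=cpu=300m --requests=cpu=300m
    # --local renders the change client-side and therefore needs a file:
    kubectl set resources -f deployment.yaml --limits=cpu=200m --local -o yaml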
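The deployment rollback sequence also demonstrates two guard rails: a paused deployment cannot be rolled back, and `--to-revision` must name a revision that actually exists in history. Sketched:

    kubectl rollout pause deployment/nginx
    kubectl rollout undo deployment/nginx       # refused while paused; resume first
    kubectl rollout resume deployment/nginx
    kubectl rollout undo deployment/nginx --to-revision=3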
W1206 13:15:15.424] I1206 13:15:13.212144 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-646d4f779d", UID:"f70d3519-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1961", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-646d4f779d-nv8vg
W1206 13:15:15.424] I1206 13:15:13.214756 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-646d4f779d", UID:"f70d3519-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1961", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-646d4f779d-wpfw9
W1206 13:15:15.425] I1206 13:15:13.214803 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-646d4f779d", UID:"f70d3519-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1961", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-646d4f779d-qxjzc
W1206 13:15:15.425] I1206 13:15:13.534476 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment", UID:"f70ca705-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1974", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-85db47bbdb to 1
W1206 13:15:15.426] I1206 13:15:13.537213 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-85db47bbdb", UID:"f73ec82e-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1975", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-85db47bbdb-8q8n4
W1206 13:15:15.426] I1206 13:15:13.539722 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment", UID:"f70ca705-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1974", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-646d4f779d to 2
W1206 13:15:15.426] E1206 13:15:13.544861 55730 replica_set.go:450] Sync "namespace-1544102103-31446/nginx-deployment-85db47bbdb" failed with Operation cannot be fulfilled on replicasets.apps "nginx-deployment-85db47bbdb": the object has been modified; please apply your changes to the latest version and try again
W1206 13:15:15.427] I1206 13:15:13.544862 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-646d4f779d", UID:"f70d3519-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1981", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-646d4f779d-nv8vg
W1206 13:15:15.427] I1206 13:15:13.545182 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment", UID:"f70ca705-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1977", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-85db47bbdb to 2
W1206 13:15:15.427] I1206 13:15:13.548614 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-85db47bbdb", UID:"f73ec82e-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"1985", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-85db47bbdb-26kfd
W1206 13:15:15.427] error: unable to find container named "redis"
W1206 13:15:15.428] I1206 13:15:14.603862 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment", UID:"f70ca705-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2007", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-646d4f779d to 0
W1206 13:15:15.428] I1206 13:15:14.609129 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-646d4f779d", UID:"f70d3519-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2011", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-646d4f779d-wpfw9
W1206 13:15:15.428] I1206 13:15:14.609182 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-646d4f779d", UID:"f70d3519-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2011", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-646d4f779d-qxjzc
W1206 13:15:15.429] I1206 13:15:14.610552 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment", UID:"f70ca705-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2010", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-dc756cc6 to 2
W1206 13:15:15.429] I1206 13:15:14.613036 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-dc756cc6", UID:"f7e11044-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2017", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-dc756cc6-wlsjz
W1206 13:15:15.429] I1206 13:15:14.615054 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-dc756cc6", UID:"f7e11044-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2017", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-dc756cc6-qps2z
... skipping 55 lines ...
I1206 13:15:18.894] Namespace: namespace-1544102117-22468
I1206 13:15:18.894] Selector: app=guestbook,tier=frontend
I1206 13:15:18.894] Labels: app=guestbook
I1206 13:15:18.895] tier=frontend
I1206 13:15:18.895] Annotations: <none>
I1206 13:15:18.895] Replicas: 3 current / 3 desired
I1206 13:15:18.895] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:15:18.895] Pod Template:
I1206 13:15:18.895] Labels: app=guestbook
I1206 13:15:18.895] tier=frontend
I1206 13:15:18.895] Containers:
I1206 13:15:18.895] php-redis:
I1206 13:15:18.896] Image: gcr.io/google_samples/gb-frontend:v3
... skipping 17 lines ...
I1206 13:15:18.991] Namespace: namespace-1544102117-22468
I1206 13:15:18.991] Selector: app=guestbook,tier=frontend
I1206 13:15:18.991] Labels: app=guestbook
I1206 13:15:18.991] tier=frontend
I1206 13:15:18.991] Annotations: <none>
I1206 13:15:18.991] Replicas: 3 current / 3 desired
I1206 13:15:18.991] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:15:18.992] Pod Template:
I1206 13:15:18.992] Labels: app=guestbook
I1206 13:15:18.992] tier=frontend
I1206 13:15:18.992] Containers:
I1206 13:15:18.992] php-redis:
I1206 13:15:18.992] Image: gcr.io/google_samples/gb-frontend:v3
... skipping 18 lines ...
I1206 13:15:19.083] Namespace: namespace-1544102117-22468
I1206 13:15:19.083] Selector: app=guestbook,tier=frontend
I1206 13:15:19.083] Labels: app=guestbook
I1206 13:15:19.083] tier=frontend
I1206 13:15:19.083] Annotations: <none>
I1206 13:15:19.083] Replicas: 3 current / 3 desired
I1206 13:15:19.083] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:15:19.083] Pod Template:
I1206 13:15:19.084] Labels: app=guestbook
I1206 13:15:19.084] tier=frontend
I1206 13:15:19.084] Containers:
I1206 13:15:19.084] php-redis:
I1206 13:15:19.084] Image: gcr.io/google_samples/gb-frontend:v3
... skipping 12 lines ...
I1206 13:15:19.178] Namespace: namespace-1544102117-22468
I1206 13:15:19.179] Selector: app=guestbook,tier=frontend
I1206 13:15:19.179] Labels: app=guestbook
I1206 13:15:19.179] tier=frontend
I1206 13:15:19.179] Annotations: <none>
I1206 13:15:19.179] Replicas: 3 current / 3 desired
I1206 13:15:19.179] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:15:19.179] Pod Template:
I1206 13:15:19.179] Labels: app=guestbook
I1206 13:15:19.179] tier=frontend
I1206 13:15:19.180] Containers:
I1206 13:15:19.180] php-redis:
I1206 13:15:19.180] Image: gcr.io/google_samples/gb-frontend:v3
... skipping 15 lines ...
I1206 13:15:19.182]
W1206 13:15:19.282] I1206 13:15:15.705049 55730 horizontal.go:309] Horizontal Pod Autoscaler frontend has been deleted in namespace-1544102092-11297
W1206 13:15:19.283] I1206 13:15:15.958549 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment", UID:"f84f514e-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2060", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-5b795689cd to 1
W1206 13:15:19.283] I1206 13:15:15.961691 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-5b795689cd", UID:"f8b097aa-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2061", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-5b795689cd-gnzlj
W1206 13:15:19.283] I1206 13:15:15.964587 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment", UID:"f84f514e-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2060", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-646d4f779d to 2
W1206 13:15:19.284] I1206 13:15:15.969236 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-646d4f779d", UID:"f84fe2d1-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2066", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-646d4f779d-kkcrs
W1206 13:15:19.284] E1206 13:15:15.969351 55730 replica_set.go:450] Sync "namespace-1544102103-31446/nginx-deployment-5b795689cd" failed with Operation cannot be fulfilled on replicasets.apps "nginx-deployment-5b795689cd": the object has been modified; please apply your changes to the latest version and try again
W1206 13:15:19.285] I1206 13:15:15.969634 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment", UID:"f84f514e-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2062", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-5b795689cd to 2
W1206 13:15:19.285] I1206 13:15:15.972195 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-5b795689cd", UID:"f8b097aa-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2071", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-5b795689cd-4lm86
W1206 13:15:19.285] I1206 13:15:16.228455 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment", UID:"f84f514e-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2085", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-5b795689cd to 0
W1206 13:15:19.286] I1206 13:15:16.232875 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-5b795689cd", UID:"f8b097aa-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2089", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-5b795689cd-gnzlj
W1206 13:15:19.286] I1206 13:15:16.233729 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-5b795689cd", UID:"f8b097aa-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2089", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-5b795689cd-4lm86
W1206 13:15:19.286] I1206 13:15:16.234749 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment", UID:"f84f514e-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2087", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-5766b7c95b to 2
... skipping 6 lines ...
W1206 13:15:19.289] I1206 13:15:16.413033 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-794dcdf6bb", UID:"f8f3bddf-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2118", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-794dcdf6bb-tqkn7
W1206 13:15:19.289] I1206 13:15:16.498549 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment", UID:"f84f514e-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2127", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled down replica set nginx-deployment-5766b7c95b to 0
W1206 13:15:19.289] I1206 13:15:16.561623 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-794dcdf6bb", UID:"f8f3bddf-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2118", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: nginx-deployment-794dcdf6bb-pn4lq
W1206 13:15:19.290] I1206 13:15:16.609186 55730 event.go:221] Event(v1.ObjectReference{Kind:"Deployment", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment", UID:"f84f514e-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2129", FieldPath:""}): type: 'Normal' reason: 'ScalingReplicaSet' Scaled up replica set nginx-deployment-65b869c68c to 2
W1206 13:15:19.290] I1206 13:15:16.714000 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-5766b7c95b", UID:"f8d8e5e0-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2130", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-5766b7c95b-4jbm6
W1206 13:15:19.290] I1206 13:15:16.764181 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102103-31446", Name:"nginx-deployment-5766b7c95b", UID:"f8d8e5e0-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2130", FieldPath:""}): type: 'Normal' reason: 'SuccessfulDelete' Deleted pod: nginx-deployment-5766b7c95b-9v94v
W1206 13:15:19.290] E1206 13:15:16.910496 55730 replica_set.go:450] Sync "namespace-1544102103-31446/nginx-deployment-65b869c68c" failed with replicasets.apps "nginx-deployment-65b869c68c" not found
W1206 13:15:19.291] E1206 13:15:17.161386 55730 replica_set.go:450] Sync "namespace-1544102103-31446/nginx-deployment-794dcdf6bb" failed with replicasets.apps "nginx-deployment-794dcdf6bb" not found
W1206 13:15:19.291] E1206 13:15:17.211096 55730 replica_set.go:450] Sync "namespace-1544102103-31446/nginx-deployment-669d4f8fc9" failed with replicasets.apps "nginx-deployment-669d4f8fc9" not found
W1206 13:15:19.291] E1206 13:15:17.261111 55730 replica_set.go:450] Sync "namespace-1544102103-31446/nginx-deployment-5766b7c95b" failed with replicasets.apps "nginx-deployment-5766b7c95b" not found
W1206 13:15:19.291] E1206 13:15:17.361175 55730 replica_set.go:450] Sync "namespace-1544102103-31446/nginx-deployment-75bf89d86f" failed with replicasets.apps "nginx-deployment-75bf89d86f" not found
W1206 13:15:19.292] I1206 13:15:17.481796 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102117-22468", Name:"frontend", UID:"f99842f9-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2166", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-czzmc
W1206 13:15:19.292] I1206 13:15:17.511892 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102117-22468", Name:"frontend", UID:"f99842f9-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2166", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-pcw4v
W1206 13:15:19.292] I1206 13:15:17.561494 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102117-22468", Name:"frontend", UID:"f99842f9-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2166", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-xm4sv
W1206 13:15:19.292] E1206 13:15:17.761188 55730 replica_set.go:450] Sync "namespace-1544102117-22468/frontend" failed with replicasets.apps "frontend" not found
W1206 13:15:19.293] I1206 13:15:17.867974 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102117-22468", Name:"frontend-no-cascade", UID:"f9d367d4-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2181", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-no-cascade-bsfhl
W1206 13:15:19.293] I1206 13:15:17.912255 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102117-22468", Name:"frontend-no-cascade", UID:"f9d367d4-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2181", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-no-cascade-c5cwg
W1206 13:15:19.293] I1206 13:15:17.962042 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102117-22468", Name:"frontend-no-cascade", UID:"f9d367d4-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2181", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-no-cascade-nvnzp
W1206 13:15:19.293] E1206 13:15:18.261652 55730 replica_set.go:450] Sync "namespace-1544102117-22468/frontend-no-cascade" failed with replicasets.apps "frontend-no-cascade" not found
W1206 13:15:19.294] I1206 13:15:18.688475 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102117-22468", Name:"frontend", UID:"fa50a4a3-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2201", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-4grn4
W1206 13:15:19.294] I1206 13:15:18.690749 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102117-22468", Name:"frontend", UID:"fa50a4a3-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2201", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-kvwfh
W1206 13:15:19.294] I1206 13:15:18.691836 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102117-22468", Name:"frontend", UID:"fa50a4a3-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2201", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-xrfq5
I1206 13:15:19.395] Successful describe rs:
I1206 13:15:19.395] Name: frontend
I1206 13:15:19.395] Namespace: namespace-1544102117-22468
I1206 13:15:19.395] Selector: app=guestbook,tier=frontend
I1206 13:15:19.395] Labels: app=guestbook
I1206 13:15:19.395] tier=frontend
I1206 13:15:19.396] Annotations: <none>
I1206 13:15:19.396] Replicas: 3 current / 3 desired
I1206 13:15:19.396] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:15:19.396] Pod Template:
I1206 13:15:19.396] Labels: app=guestbook
I1206 13:15:19.396] tier=frontend
I1206 13:15:19.396] Containers:
I1206 13:15:19.396] php-redis:
I1206 13:15:19.397] Image: gcr.io/google_samples/gb-frontend:v3
... skipping 17 lines ...
I1206 13:15:19.399] Namespace: namespace-1544102117-22468
I1206 13:15:19.399] Selector: app=guestbook,tier=frontend
I1206 13:15:19.399] Labels: app=guestbook
I1206 13:15:19.399] tier=frontend
I1206 13:15:19.399] Annotations: <none>
I1206 13:15:19.399] Replicas: 3 current / 3 desired
I1206 13:15:19.399] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:15:19.400] Pod Template:
I1206 13:15:19.400] Labels: app=guestbook
I1206 13:15:19.400] tier=frontend
I1206 13:15:19.400] Containers:
I1206 13:15:19.400] php-redis:
I1206 13:15:19.400] Image: gcr.io/google_samples/gb-frontend:v3
... skipping 17 lines ...
I1206 13:15:19.493] Namespace: namespace-1544102117-22468
I1206 13:15:19.493] Selector: app=guestbook,tier=frontend
I1206 13:15:19.493] Labels: app=guestbook
I1206 13:15:19.493] tier=frontend
I1206 13:15:19.493] Annotations: <none>
I1206 13:15:19.494] Replicas: 3 current / 3 desired
I1206 13:15:19.494] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:15:19.494] Pod Template:
I1206 13:15:19.494] Labels: app=guestbook
I1206 13:15:19.494] tier=frontend
I1206 13:15:19.494] Containers:
I1206 13:15:19.494] php-redis:
I1206 13:15:19.494] Image: gcr.io/google_samples/gb-frontend:v3
... skipping 11 lines ...
I1206 13:15:19.593] Namespace: namespace-1544102117-22468
I1206 13:15:19.593] Selector: app=guestbook,tier=frontend
I1206 13:15:19.593] Labels: app=guestbook
I1206 13:15:19.593] tier=frontend
I1206 13:15:19.593] Annotations: <none>
I1206 13:15:19.593] Replicas: 3 current / 3 desired
I1206 13:15:19.593] Pods Status: 0 Running / 3 Waiting / 0 Succeeded / 0 Failed
I1206 13:15:19.594] Pod Template:
I1206 13:15:19.594] Labels: app=guestbook
I1206 13:15:19.594] tier=frontend
I1206 13:15:19.594] Containers:
I1206 13:15:19.594] php-redis:
I1206 13:15:19.594] Image: gcr.io/google_samples/gb-frontend:v3
... skipping 184 lines ...
I1206 13:15:24.182] horizontalpodautoscaler.autoscaling/frontend autoscaled
I1206 13:15:24.262] apps.sh:647: Successful get hpa frontend {{.spec.minReplicas}} {{.spec.maxReplicas}} {{.spec.targetCPUUtilizationPercentage}}: 2 3 80
I1206 13:15:24.331] horizontalpodautoscaler.autoscaling "frontend" deleted
W1206 13:15:24.432] I1206 13:15:23.791209 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102117-22468", Name:"frontend", UID:"fd5b3869-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2391", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-22l5q
W1206 13:15:24.432] I1206 13:15:23.793422 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102117-22468", Name:"frontend", UID:"fd5b3869-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2391", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-ddp22
W1206 13:15:24.432] I1206 13:15:23.793990 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicaSet", Namespace:"namespace-1544102117-22468", Name:"frontend", UID:"fd5b3869-f958-11e8-9847-0242ac110002", APIVersion:"apps/v1", ResourceVersion:"2391", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: frontend-fdbp4
W1206 13:15:24.433] Error: required flag(s) "max" not set
W1206 13:15:24.433]
W1206 13:15:24.433]
W1206 13:15:24.433] Examples:
W1206 13:15:24.433] # Auto scale a deployment "foo", with the number of pods between 2 and 10, no target CPU utilization specified so a default autoscaling policy will be used:
W1206 13:15:24.433] kubectl autoscale deployment foo --min=2 --max=10
W1206 13:15:24.433]
... skipping 85 lines ...
I1206 13:15:27.067] apps.sh:431: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/pause:2.0:
I1206 13:15:27.152] apps.sh:432: Successful get statefulset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 2
I1206 13:15:27.244] statefulset.apps/nginx rolled back
I1206 13:15:27.332] apps.sh:435: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx-slim:0.7:
I1206 13:15:27.412] apps.sh:436: Successful get statefulset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I1206 13:15:27.504] Successful
I1206 13:15:27.504] message:error: unable to find specified revision 1000000 in history
I1206 13:15:27.505] has:unable to find specified revision
I1206 13:15:27.583] apps.sh:440: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx-slim:0.7:
I1206 13:15:27.664] apps.sh:441: Successful get statefulset {{range.items}}{{(len .spec.template.spec.containers)}}{{end}}: 1
I1206 13:15:27.755] statefulset.apps/nginx rolled back
I1206 13:15:27.839] apps.sh:444: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 0).image}}:{{end}}: k8s.gcr.io/nginx-slim:0.8:
I1206 13:15:27.922] apps.sh:445: Successful get statefulset {{range.items}}{{(index .spec.template.spec.containers 1).image}}:{{end}}: k8s.gcr.io/pause:2.0:
... skipping 61 lines ...
I1206 13:15:29.566] Name: mock
I1206 13:15:29.566] Namespace: namespace-1544102128-5893
I1206 13:15:29.566] Selector: app=mock
I1206 13:15:29.566] Labels: app=mock
I1206 13:15:29.567] Annotations: <none>
I1206 13:15:29.567] Replicas: 1 current / 1 desired
I1206 13:15:29.567] Pods Status: 0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I1206 13:15:29.567] Pod Template:
I1206 13:15:29.567] Labels: app=mock
I1206 13:15:29.567] Containers:
I1206 13:15:29.567] mock-container:
I1206 13:15:29.567] Image: k8s.gcr.io/pause:2.0
I1206 13:15:29.567] Port: 9949/TCP
... skipping 56 lines ...
I1206 13:15:31.495] Name: mock
I1206 13:15:31.495] Namespace: namespace-1544102128-5893
I1206 13:15:31.495] Selector: app=mock
I1206 13:15:31.495] Labels: app=mock
I1206 13:15:31.495] Annotations: <none>
I1206 13:15:31.495] Replicas: 1 current / 1 desired
I1206 13:15:31.495] Pods Status: 0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I1206 13:15:31.495] Pod Template:
I1206 13:15:31.495] Labels: app=mock
I1206 13:15:31.496] Containers:
I1206 13:15:31.496] mock-container:
I1206 13:15:31.496] Image: k8s.gcr.io/pause:2.0
I1206 13:15:31.496] Port: 9949/TCP
... skipping 56 lines ...
I1206 13:15:33.421] Name: mock
I1206 13:15:33.421] Namespace: namespace-1544102128-5893
I1206 13:15:33.421] Selector: app=mock
I1206 13:15:33.421] Labels: app=mock
I1206 13:15:33.421] Annotations: <none>
I1206 13:15:33.421] Replicas: 1 current / 1 desired
I1206 13:15:33.421] Pods Status: 0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I1206 13:15:33.421] Pod Template:
I1206 13:15:33.421] Labels: app=mock
I1206 13:15:33.421] Containers:
I1206 13:15:33.422] mock-container:
I1206 13:15:33.422] Image: k8s.gcr.io/pause:2.0
I1206 13:15:33.422] Port: 9949/TCP
... skipping 42 lines ...
I1206 13:15:35.261] Namespace: namespace-1544102128-5893
I1206 13:15:35.261] Selector: app=mock
I1206 13:15:35.261] Labels: app=mock
I1206 13:15:35.261] status=replaced
I1206 13:15:35.261] Annotations: <none>
I1206 13:15:35.261] Replicas: 1 current / 1 desired
I1206 13:15:35.262] Pods Status: 0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I1206 13:15:35.262] Pod Template:
I1206 13:15:35.262] Labels: app=mock
I1206 13:15:35.262] Containers:
I1206 13:15:35.262] mock-container:
I1206 13:15:35.262] Image: k8s.gcr.io/pause:2.0
I1206 13:15:35.262] Port: 9949/TCP
... skipping 11 lines ...
I1206 13:15:35.264] Namespace: namespace-1544102128-5893
I1206 13:15:35.264] Selector: app=mock2
I1206 13:15:35.264] Labels: app=mock2
I1206 13:15:35.264] status=replaced
I1206 13:15:35.264] Annotations: <none>
I1206 13:15:35.264] Replicas: 1 current / 1 desired
I1206 13:15:35.264] Pods Status: 0 Running / 1 Waiting / 0 Succeeded / 0 Failed
I1206 13:15:35.264] Pod Template:
I1206 13:15:35.264] Labels: app=mock2
I1206 13:15:35.265] Containers:
I1206 13:15:35.265] mock-container:
I1206 13:15:35.265] Image: k8s.gcr.io/pause:2.0
I1206 13:15:35.265] Port: 9949/TCP
... skipping 107 lines ...
I1206 13:15:44.054] +++ [1206 13:15:44] Testing persistent volumes
I1206 13:15:44.130] storage.sh:30: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:15:44.278] persistentvolume/pv0001 created
I1206 13:15:44.365] storage.sh:33: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0001:
I1206 13:15:44.433] persistentvolume "pv0001" deleted
W1206 13:15:44.534] I1206 13:15:43.260042 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102128-5893", Name:"mock", UID:"08f63254-f959-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"2661", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: mock-gqzfb
W1206 13:15:44.534] E1206 13:15:44.284306 55730 pv_protection_controller.go:116] PV pv0001 failed with : Operation cannot be fulfilled on persistentvolumes "pv0001": the object has been modified; please apply your changes to the latest version and try again
I1206 13:15:44.635] persistentvolume/pv0002 created
I1206 13:15:44.652] storage.sh:36: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0002:
I1206 13:15:44.725] persistentvolume "pv0002" deleted
I1206 13:15:44.862] persistentvolume/pv0003 created
I1206 13:15:44.949] storage.sh:39: Successful get pv {{range.items}}{{.metadata.name}}:{{end}}: pv0003:
I1206 13:15:45.018] persistentvolume "pv0003" deleted
... skipping 10 lines ...
I1206 13:15:45.309] Context "test" modified.
I1206 13:15:45.315] +++ [1206 13:15:45] Testing persistent volumes claims
I1206 13:15:45.392] storage.sh:57: Successful get pvc {{range.items}}{{.metadata.name}}:{{end}}:
I1206 13:15:45.532] persistentvolumeclaim/myclaim-1 created
I1206 13:15:45.621] storage.sh:60: Successful get pvc {{range.items}}{{.metadata.name}}:{{end}}: myclaim-1:
I1206 13:15:45.692] persistentvolumeclaim "myclaim-1" deleted
W1206 13:15:45.792] E1206 13:15:44.864344 55730 pv_protection_controller.go:116] PV pv0003 failed with : Operation cannot be fulfilled on persistentvolumes "pv0003": the object has been modified; please apply your changes to the latest version and try again
W1206 13:15:45.793] I1206 13:15:45.532283 55730 event.go:221] Event(v1.ObjectReference{Kind:"PersistentVolumeClaim", Namespace:"namespace-1544102145-16173", Name:"myclaim-1", UID:"0a51391a-f959-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"2692", FieldPath:""}): type: 'Normal' reason: 'FailedBinding' no persistent volumes available for this claim and no storage class is set
W1206 13:15:45.793] I1206 13:15:45.534706 55730 event.go:221] Event(v1.ObjectReference{Kind:"PersistentVolumeClaim", Namespace:"namespace-1544102145-16173", Name:"myclaim-1", UID:"0a51391a-f959-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"2694", FieldPath:""}): type: 'Normal' reason: 'FailedBinding' no persistent volumes available for this claim and no storage class is set
W1206 13:15:45.793] I1206 13:15:45.691904 55730 event.go:221] Event(v1.ObjectReference{Kind:"PersistentVolumeClaim", Namespace:"namespace-1544102145-16173", Name:"myclaim-1", UID:"0a51391a-f959-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"2696", FieldPath:""}): type: 'Normal' reason: 'FailedBinding' no persistent volumes available for this claim and no storage class is set
W1206 13:15:45.833] I1206 13:15:45.833283 55730 event.go:221] Event(v1.ObjectReference{Kind:"PersistentVolumeClaim", Namespace:"namespace-1544102145-16173", Name:"myclaim-2", UID:"0a7f230b-f959-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"2699", FieldPath:""}): type: 'Normal' reason: 'FailedBinding' no persistent volumes available for this claim and no storage class is set
W1206 13:15:45.836] I1206 13:15:45.835868 55730 event.go:221] Event(v1.ObjectReference{Kind:"PersistentVolumeClaim", Namespace:"namespace-1544102145-16173", Name:"myclaim-2", UID:"0a7f230b-f959-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"2700", FieldPath:""}): type: 'Normal' reason: 'FailedBinding' no persistent volumes available for this claim and no storage class is set
I1206 13:15:45.937] persistentvolumeclaim/myclaim-2 created
... skipping 456 lines ...
I1206 13:15:49.127] yes
I1206 13:15:49.127] has:the server doesn't have a resource type
I1206 13:15:49.194] Successful
I1206 13:15:49.195] message:yes
I1206 13:15:49.195] has:yes
I1206 13:15:49.259] Successful
I1206 13:15:49.259] message:error: --subresource can not be used with NonResourceURL
I1206 13:15:49.259] has:subresource can not be used with NonResourceURL
I1206 13:15:49.332] Successful
I1206 13:15:49.405] Successful
I1206 13:15:49.406] message:yes
I1206 13:15:49.406] 0
I1206 13:15:49.406] has:0
... skipping 6 lines ...
I1206 13:15:49.571] role.rbac.authorization.k8s.io/testing-R reconciled
I1206 13:15:49.656] legacy-script.sh:736: Successful get rolebindings -n some-other-random -l test-cmd=auth {{range.items}}{{.metadata.name}}:{{end}}: testing-RB:
I1206 13:15:49.736] legacy-script.sh:737: Successful get roles -n some-other-random -l test-cmd=auth {{range.items}}{{.metadata.name}}:{{end}}: testing-R:
I1206 13:15:49.818] legacy-script.sh:738: Successful get clusterrolebindings -l test-cmd=auth {{range.items}}{{.metadata.name}}:{{end}}: testing-CRB:
I1206 13:15:49.900] legacy-script.sh:739: Successful get clusterroles -l test-cmd=auth {{range.items}}{{.metadata.name}}:{{end}}: testing-CR:
I1206 13:15:49.975] Successful
I1206 13:15:49.976] message:error: only rbac.authorization.k8s.io/v1 is supported: not *v1beta1.ClusterRole
I1206 13:15:49.976] has:only rbac.authorization.k8s.io/v1 is supported
I1206 13:15:50.055] rolebinding.rbac.authorization.k8s.io "testing-RB" deleted
I1206 13:15:50.061] role.rbac.authorization.k8s.io "testing-R" deleted
I1206 13:15:50.069] clusterrole.rbac.authorization.k8s.io "testing-CR" deleted
I1206 13:15:50.075] clusterrolebinding.rbac.authorization.k8s.io "testing-CRB" deleted
I1206 13:15:50.084] Recording: run_retrieve_multiple_tests
... skipping 32 lines ...
I1206 13:15:51.061] +++ Running case: test-cmd.run_kubectl_explain_tests
I1206 13:15:51.062] +++ working dir: /go/src/k8s.io/kubernetes
I1206 13:15:51.065] +++ command: run_kubectl_explain_tests
I1206 13:15:51.077] +++ [1206 13:15:51] Testing kubectl(v1:explain)
W1206 13:15:51.178] I1206 13:15:50.960668 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102150-3418", Name:"cassandra", UID:"0d593dcd-f959-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"2741", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: cassandra-sngsz
W1206 13:15:51.178] I1206 13:15:50.965062 55730 event.go:221] Event(v1.ObjectReference{Kind:"ReplicationController", Namespace:"namespace-1544102150-3418", Name:"cassandra", UID:"0d593dcd-f959-11e8-9847-0242ac110002", APIVersion:"v1", ResourceVersion:"2741", FieldPath:""}): type: 'Normal' reason: 'SuccessfulCreate' Created pod: cassandra-hsfpt
W1206 13:15:51.178] E1206 13:15:50.969598 55730 replica_set.go:450] Sync "namespace-1544102150-3418/cassandra" failed with Operation cannot be fulfilled on replicationcontrollers "cassandra": StorageError: invalid object, Code: 4, Key: /registry/controllers/namespace-1544102150-3418/cassandra, ResourceVersion: 0, AdditionalErrorMsg: Precondition failed: UID in precondition: 0d593dcd-f959-11e8-9847-0242ac110002, UID in object meta:
I1206 13:15:51.279] KIND: Pod
I1206 13:15:51.279] VERSION: v1
I1206 13:15:51.279]
I1206 13:15:51.279] DESCRIPTION:
I1206 13:15:51.279] Pod is a collection of containers that can run on a host. This resource is
I1206 13:15:51.279] created by clients and scheduled onto hosts.
... skipping 849 lines ...
I1206 13:16:16.351] message:node/127.0.0.1 already uncordoned (dry run)
I1206 13:16:16.351] has:already uncordoned
I1206 13:16:16.428] node-management.sh:119: Successful get nodes 127.0.0.1 {{.spec.unschedulable}}: <no value>
I1206 13:16:16.498] node/127.0.0.1 labeled
I1206 13:16:16.578] node-management.sh:124: Successful get nodes 127.0.0.1 {{.metadata.labels.test}}: label
I1206 13:16:16.635] Successful
I1206 13:16:16.635] message:error: cannot specify both a node name and a --selector option
I1206 13:16:16.635] See 'kubectl drain -h' for help and examples
I1206 13:16:16.636] has:cannot specify both a node name
I1206 13:16:16.692] Successful
I1206 13:16:16.692] message:error: USAGE: cordon NODE [flags]
I1206 13:16:16.693] See 'kubectl cordon -h' for help and examples
I1206 13:16:16.693] has:error\: USAGE\: cordon NODE
I1206 13:16:16.761] node/127.0.0.1 already uncordoned
I1206 13:16:16.825] Successful
I1206 13:16:16.825] message:error: You must provide one or more resources by argument or filename.
I1206 13:16:16.825] Example resource specifications include:
I1206 13:16:16.825] '-f rsrc.yaml'
I1206 13:16:16.825] '--filename=rsrc.json'
I1206 13:16:16.825] '<resource> <name>'
I1206 13:16:16.826] '<resource>'
I1206 13:16:16.826] has:must provide one or more resources
... skipping 15 lines ...
I1206 13:16:17.211] Successful
I1206 13:16:17.211] message:The following kubectl-compatible plugins are available:
I1206 13:16:17.211]
I1206 13:16:17.211] test/fixtures/pkg/kubectl/plugins/version/kubectl-version
I1206 13:16:17.211] - warning: kubectl-version overwrites existing command: "kubectl version"
I1206 13:16:17.212]
I1206 13:16:17.212] error: one plugin warning was found
I1206 13:16:17.212] has:kubectl-version overwrites existing command: "kubectl version"
I1206 13:16:17.275] Successful
I1206 13:16:17.276] message:The following kubectl-compatible plugins are available:
I1206 13:16:17.276]
I1206 13:16:17.276] test/fixtures/pkg/kubectl/plugins/kubectl-foo
I1206 13:16:17.276] test/fixtures/pkg/kubectl/plugins/foo/kubectl-foo
I1206 13:16:17.276] - warning: test/fixtures/pkg/kubectl/plugins/foo/kubectl-foo is overshadowed by a similarly named plugin: test/fixtures/pkg/kubectl/plugins/kubectl-foo
I1206 13:16:17.276]
I1206 13:16:17.276] error: one plugin warning was found
I1206 13:16:17.277] has:test/fixtures/pkg/kubectl/plugins/foo/kubectl-foo is overshadowed by a similarly named plugin
I1206 13:16:17.342] Successful
I1206 13:16:17.342] message:The following kubectl-compatible plugins are available:
I1206 13:16:17.342]
I1206 13:16:17.342] test/fixtures/pkg/kubectl/plugins/kubectl-foo
I1206 13:16:17.343] has:plugins are available
I1206 13:16:17.408] Successful
I1206 13:16:17.409] message:
I1206 13:16:17.409] error: unable to read directory "test/fixtures/pkg/kubectl/plugins/empty" in your PATH: open test/fixtures/pkg/kubectl/plugins/empty: no such file or directory
I1206 13:16:17.409] error: unable to find any kubectl plugins in your PATH
I1206 13:16:17.409] has:unable to find any kubectl plugins in your PATH
I1206 13:16:17.472] Successful
I1206 13:16:17.472] message:I am plugin foo
I1206 13:16:17.472] has:plugin foo
I1206 13:16:17.537] Successful
I1206 13:16:17.537] message:Client Version: version.Info{Major:"1", Minor:"14+", GitVersion:"v1.14.0-alpha.0.883+0351853ea1ae78", GitCommit:"0351853ea1ae783ffe5db3cd6c1fef72bf5e57ec", GitTreeState:"clean", BuildDate:"2018-12-06T13:09:51Z", GoVersion:"go1.11.1", Compiler:"gc", Platform:"linux/amd64"}
... skipping 9 lines ...
I1206 13:16:17.608]
I1206 13:16:17.609] +++ Running case: test-cmd.run_impersonation_tests
I1206 13:16:17.611] +++ working dir: /go/src/k8s.io/kubernetes
I1206 13:16:17.613] +++ command: run_impersonation_tests
I1206 13:16:17.620] +++ [1206 13:16:17] Testing impersonation
I1206 13:16:17.681] Successful
I1206 13:16:17.682] message:error: requesting groups or user-extra for without impersonating a user
I1206 13:16:17.682] has:without impersonating a user
I1206 13:16:17.821] certificatesigningrequest.certificates.k8s.io/foo created
I1206 13:16:17.898] authorization.sh:68: Successful get csr/foo {{.spec.username}}: user1
I1206 13:16:17.975] authorization.sh:69: Successful get csr/foo {{range .spec.groups}}{{.}}{{end}}: system:authenticated
I1206 13:16:18.048] certificatesigningrequest.certificates.k8s.io "foo" deleted
I1206 13:16:18.185] certificatesigningrequest.certificates.k8s.io/foo created
... skipping 68 lines ...
W1206 13:16:18.636] I1206 13:16:18.628686 52364 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W1206 13:16:18.636] I1206 13:16:18.628702 52364 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W1206 13:16:18.636] I1206 13:16:18.628738 52364 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W1206 13:16:18.636] I1206 13:16:18.628747 52364 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W1206 13:16:18.636] I1206 13:16:18.628778 52364 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W1206 13:16:18.636] I1206 13:16:18.628787 52364 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W1206 13:16:18.637] W1206 13:16:18.628796 52364 clientconn.go:1304] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W1206 13:16:18.637] W1206 13:16:18.628849 52364 clientconn.go:1304] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W1206 13:16:18.637] I1206 13:16:18.628865 52364 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W1206 13:16:18.637] I1206 13:16:18.628878 52364 balancer_v1_wrapper.go:125] balancerWrapper: got update addr from Notify: [{127.0.0.1:2379 <nil>}]
W1206 13:16:18.637] W1206 13:16:18.628894 52364 clientconn.go:1304] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W1206 13:16:18.637] W1206 13:16:18.628906 52364 clientconn.go:1304] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W1206 13:16:18.638] W1206 13:16:18.629000 52364 clientconn.go:1304] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W1206 13:16:18.638] W1206 13:16:18.629051 52364 clientconn.go:1304] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W1206 13:16:18.638] W1206 13:16:18.629087 52364 clientconn.go:1304] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W1206 13:16:18.638] W1206 13:16:18.629095 52364 clientconn.go:1304] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W1206 13:16:18.638] W1206 13:16:18.629129 52364 clientconn.go:1304] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W1206 13:16:18.639] W1206 13:16:18.629132 52364 clientconn.go:1304] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W1206 13:16:18.639] W1206 13:16:18.629198 52364 clientconn.go:1304] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W1206 13:16:18.639] W1206 13:16:18.629245 52364 clientconn.go:1304] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W1206 13:16:18.639] W1206 13:16:18.629278 52364 clientconn.go:1304] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W1206 13:16:18.639] W1206 13:16:18.629315 52364 clientconn.go:1304] grpc: addrConn.createTransport failed to connect to {127.0.0.1:2379 0 <nil>}. Err :connection error: desc = "transport: Error while dialing dial tcp 127.0.0.1:2379: connect: connection refused". Reconnecting...
W1206 13:16:18.640] E1206 13:16:18.629318 52364 controller.go:172] rpc error: code = Unavailable desc = transport is closing
W1206 13:16:18.664] + make test-integration
I1206 13:16:18.765] No resources found
I1206 13:16:18.765] pod "test-pod-1" force deleted
I1206 13:16:18.765] +++ [1206 13:16:18] TESTS PASSED
I1206 13:16:18.765] junit report dir: /workspace/artifacts
I1206 13:16:18.765] +++ [1206 13:16:18] Clean up complete
... skipping 227 lines ...
I1206 13:29:39.752] ok k8s.io/kubernetes/test/integration/replicationcontroller 55.820s
I1206 13:29:39.752] [restful] 2018/12/06 13:20:05 log.go:33: [restful/swagger] listing is available at https://127.0.0.1:42577/swaggerapi
I1206 13:29:39.752] [restful] 2018/12/06 13:20:05 log.go:33: [restful/swagger] https://127.0.0.1:42577/swaggerui/ is mapped to folder /swagger-ui/
I1206 13:29:39.752] [restful] 2018/12/06 13:20:08 log.go:33: [restful/swagger] listing is available at https://127.0.0.1:42577/swaggerapi
I1206 13:29:39.752] [restful] 2018/12/06 13:20:08 log.go:33: [restful/swagger] https://127.0.0.1:42577/swaggerui/ is mapped to folder /swagger-ui/
I1206 13:29:39.752] ok k8s.io/kubernetes/test/integration/scale 11.079s
I1206 13:29:39.752] FAIL k8s.io/kubernetes/test/integration/scheduler 564.717s
I1206 13:29:39.753] ok k8s.io/kubernetes/test/integration/scheduler_perf 1.053s
I1206 13:29:39.753] ok k8s.io/kubernetes/test/integration/secrets 4.509s
I1206 13:29:39.753] ok k8s.io/kubernetes/test/integration/serviceaccount 46.435s
I1206 13:29:39.753] [restful] 2018/12/06 13:21:09 log.go:33: [restful/swagger] listing is available at https://127.0.0.1:46519/swaggerapi
I1206 13:29:39.753] [restful] 2018/12/06 13:21:09 log.go:33: [restful/swagger] https://127.0.0.1:46519/swaggerui/ is mapped to folder /swagger-ui/
I1206 13:29:39.753] [restful] 2018/12/06 13:21:11 log.go:33: [restful/swagger] listing is available at https://127.0.0.1:46519/swaggerapi
... skipping 7 lines ...
I1206 13:29:39.754] [restful] 2018/12/06 13:21:49 log.go:33: [restful/swagger] https://127.0.0.1:45873/swaggerui/ is mapped to folder /swagger-ui/
I1206 13:29:39.754] ok k8s.io/kubernetes/test/integration/tls 13.144s
I1206 13:29:39.754] ok k8s.io/kubernetes/test/integration/ttlcontroller 11.188s
I1206 13:29:39.754] ok k8s.io/kubernetes/test/integration/volume 91.322s
I1206 13:29:39.755] ok k8s.io/kubernetes/vendor/k8s.io/apiextensions-apiserver/test/integration 142.692s
I1206 13:29:41.202] +++ [1206 13:29:41] Saved JUnit XML test report to /workspace/artifacts/junit_f5a444384056ebac4f2929ce7b7920ea9733ca19_20181206-131627.xml
I1206 13:29:41.204] Makefile:184: recipe for target 'test' failed
I1206 13:29:41.213] +++ [1206 13:29:41] Cleaning up etcd
W1206 13:29:41.313] make[1]: *** [test] Error 1
W1206 13:29:41.314] !!! [1206 13:29:41] Call tree:
W1206 13:29:41.314] !!! [1206 13:29:41] 1: hack/make-rules/test-integration.sh:105 runTests(...)
W1206 13:29:41.353] make: *** [test-integration] Error 1
I1206 13:29:41.454] +++ [1206 13:29:41] Integration test cleanup complete
I1206 13:29:41.454] Makefile:203: recipe for target 'test-integration' failed
W1206 13:29:42.474] Traceback (most recent call last):
W1206 13:29:42.474] File "/workspace/./test-infra/jenkins/../scenarios/kubernetes_verify.py", line 167, in <module>
W1206 13:29:42.474] main(ARGS.branch, ARGS.script, ARGS.force, ARGS.prow)
W1206 13:29:42.474] File "/workspace/./test-infra/jenkins/../scenarios/kubernetes_verify.py", line 136, in main
W1206 13:29:42.474] check(*cmd)
W1206 13:29:42.475] File "/workspace/./test-infra/jenkins/../scenarios/kubernetes_verify.py", line 48, in check
W1206 13:29:42.475] subprocess.check_call(cmd)
W1206 13:29:42.475] File "/usr/lib/python2.7/subprocess.py", line 540, in check_call
W1206 13:29:42.500] raise CalledProcessError(retcode, cmd)
W1206 13:29:42.501] subprocess.CalledProcessError: Command '('docker', 'run', '--rm=true', '--privileged=true', '-v', '/var/run/docker.sock:/var/run/docker.sock', '-v', '/etc/localtime:/etc/localtime:ro', '-v', '/workspace/k8s.io/kubernetes:/go/src/k8s.io/kubernetes', '-v', '/workspace/k8s.io/:/workspace/k8s.io/', '-v', '/workspace/_artifacts:/workspace/artifacts', '-e', 'KUBE_FORCE_VERIFY_CHECKS=y', '-e', 'KUBE_VERIFY_GIT_BRANCH=master', '-e', 'REPO_DIR=/workspace/k8s.io/kubernetes', '--tmpfs', '/tmp:exec,mode=1777', 'gcr.io/k8s-testimages/kubekins-test:1.13-v20181105-ceed87206', 'bash', '-c', 'cd kubernetes && ./hack/jenkins/test-dockerized.sh')' returned non-zero exit status 2
E1206 13:29:42.508] Command failed
I1206 13:29:42.508] process 576 exited with code 1 after 25.1m
E1206 13:29:42.509] FAIL: ci-kubernetes-integration-master
I1206 13:29:42.509] Call: gcloud auth activate-service-account --key-file=/etc/service-account/service-account.json
W1206 13:29:43.008] Activated service account credentials for: [pr-kubekins@kubernetes-jenkins-pull.iam.gserviceaccount.com]
I1206 13:29:43.047] process 125097 exited with code 0 after 0.0m
I1206 13:29:43.048] Call: gcloud config get-value account
I1206 13:29:43.289] process 125110 exited with code 0 after 0.0m
I1206 13:29:43.289] Will upload results to gs://kubernetes-jenkins/logs using pr-kubekins@kubernetes-jenkins-pull.iam.gserviceaccount.com
I1206 13:29:43.289] Upload result and artifacts...
I1206 13:29:43.289] Gubernator results at https://gubernator.k8s.io/build/kubernetes-jenkins/logs/ci-kubernetes-integration-master/7156
I1206 13:29:43.290] Call: gsutil ls gs://kubernetes-jenkins/logs/ci-kubernetes-integration-master/7156/artifacts
W1206 13:29:45.038] CommandException: One or more URLs matched no objects.
E1206 13:29:45.186] Command failed
I1206 13:29:45.186] process 125123 exited with code 1 after 0.0m
W1206 13:29:45.186] Remote dir gs://kubernetes-jenkins/logs/ci-kubernetes-integration-master/7156/artifacts not exist yet
I1206 13:29:45.186] Call: gsutil -m -q -o GSUtil:use_magicfile=True cp -r -c -z log,txt,xml /workspace/_artifacts gs://kubernetes-jenkins/logs/ci-kubernetes-integration-master/7156/artifacts
I1206 13:29:48.770] process 125268 exited with code 0 after 0.1m
W1206 13:29:48.770] metadata path /workspace/_artifacts/metadata.json does not exist
W1206 13:29:48.771] metadata not found or invalid, init with empty metadata
... skipping 15 lines ...