Skip to content

Misleading log: TaskExit event in podsandbox handler #11836

@gjmzj

Description

@gjmzj

Description

repeating logs:

May 12 06:18:36 k8s-192-168-0-19 containerd[4600]: time="2025-05-12T06:18:36.852558301+08:00" level=info msg="TaskExit event in podsandbox handler container_id:\"e28786a1b2069e77762bb83efe638e000effc88ec196d7aec1749607e938264c\"  id:\"b91276d1e3ddc165d517311b17ada8e5b29f2bd4d5424b9a2f17a584ca6590e8\"  pid:477946  exited_at:{seconds:1747001916  nanos:852280084}"
May 12 06:18:43 k8s-192-168-0-19 containerd[4600]: time="2025-05-12T06:18:43.722993392+08:00" level=info msg="TaskExit event in podsandbox handler container_id:\"e28786a1b2069e77762bb83efe638e000effc88ec196d7aec1749607e938264c\"  id:\"2228ddd8b66d9705fd73cd9850d638ab052db1ce7959b3df8e9bde5e0e426560\"  pid:478032  exited_at:{seconds:1747001923  nanos:722699189}"
May 12 06:18:44 k8s-192-168-0-19 containerd[4600]: time="2025-05-12T06:18:44.884273469+08:00" level=info msg="TaskExit event in podsandbox handler container_id:\"284466cbf4654978540de374b862294f4f9329b1ca122251a6d06b2c7936c9f5\"  id:\"6708e2f926206c259eb76bb8ff5251f29484928f9e42ac73cf6453553ea08c93\"  pid:478061  exited_at:{seconds:1747001924  nanos:883886761}"
May 12 06:18:46 k8s-192-168-0-19 containerd[4600]: time="2025-05-12T06:18:46.107961441+08:00" level=info msg="TaskExit event in podsandbox handler container_id:\"284466cbf4654978540de374b862294f4f9329b1ca122251a6d06b2c7936c9f5\"  id:\"e016d5fa361d096344da1256e3411496a1a0f75f0bafa5fd2e24b46f3c71dbf7\"  pid:478083  exited_at:{seconds:1747001926  nanos:107755671}"
May 12 06:18:46 k8s-192-168-0-19 containerd[4600]: time="2025-05-12T06:18:46.858090634+08:00" level=info msg="TaskExit event in podsandbox handler container_id:\"e28786a1b2069e77762bb83efe638e000effc88ec196d7aec1749607e938264c\"  id:\"13a85adb96620ede3ab96482f452eea631d0983127380bdea91f1094f04b0fa3\"  pid:478104  exited_at:{seconds:1747001926  nanos:857795348}"

I found that these two containers are running and healthy.

root@k8s-192-168-0-19:~# crictl ps |grep e28786a
e28786a1b2069       c08601d8f8df7       9 hours ago         Running             calico-node               0                   06a4a2d9c4470       calico-node-7d7zm                          kube-system
root@k8s-192-168-0-19:~# crictl ps |grep 284466
284466cbf4654       a9718f9877332       8 hours ago         Running             calico-kube-controllers   0                   e17faa6df596d       calico-kube-controllers-54bb64fd9f-f8hsx   kube-system
root@k8s-192-168-0-19:~# kubectl describe pod -n kube-system calico-kube-controllers-54bb64fd9f-f8hsx
Name:                 calico-kube-controllers-54bb64fd9f-f8hsx
Namespace:            kube-system
Priority:             2000000000
Priority Class Name:  system-cluster-critical
Service Account:      calico-kube-controllers
Node:                 k8s-192-168-0-19/192.168.0.19
Start Time:           Sun, 11 May 2025 22:32:54 +0800
Labels:               k8s-app=calico-kube-controllers
                      pod-template-hash=54bb64fd9f
Annotations:          <none>
Status:               Running
IP:                   192.168.0.19
IPs:
  IP:           192.168.0.19
Controlled By:  ReplicaSet/calico-kube-controllers-54bb64fd9f
Containers:
  calico-kube-controllers:
    Container ID:   containerd://284466cbf4654978540de374b862294f4f9329b1ca122251a6d06b2c7936c9f5
    Image:          easzlab.io.local:5000/calico/kube-controllers:v3.28.4
    Image ID:       easzlab.io.local:5000/calico/kube-controllers@sha256:1c07cc091d70a525b305e84c3c83086046b71875fb07ffafb8154301cde1f2ee
    Port:           <none>
    Host Port:      <none>
    State:          Running
      Started:      Sun, 11 May 2025 22:32:55 +0800
    Ready:          True
    Restart Count:  0
    Liveness:       exec [/usr/bin/check-status -l] delay=10s timeout=10s period=10s #success=1 #failure=6
    Readiness:      exec [/usr/bin/check-status -r] delay=0s timeout=1s period=10s #success=1 #failure=3
    Environment:
      ETCD_ENDPOINTS:       <set to the key 'etcd_endpoints' of config map 'calico-config'>  Optional: false
      ETCD_CA_CERT_FILE:    <set to the key 'etcd_ca' of config map 'calico-config'>         Optional: false
      ETCD_KEY_FILE:        <set to the key 'etcd_key' of config map 'calico-config'>        Optional: false
      ETCD_CERT_FILE:       <set to the key 'etcd_cert' of config map 'calico-config'>       Optional: false
      ENABLED_CONTROLLERS:  policy,namespace,serviceaccount,workloadendpoint,node
    Mounts:
      /calico-secrets from etcd-certs (rw)
      /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-n7mvg (ro)
Conditions:
  Type                        Status
  PodReadyToStartContainers   True
  Initialized                 True
  Ready                       True
  ContainersReady             True
  PodScheduled                True
Volumes:
  etcd-certs:
    Type:        Secret (a volume populated by a Secret)
    SecretName:  calico-etcd-secrets
    Optional:    false
  kube-api-access-n7mvg:
    Type:                    Projected (a volume that contains injected data from multiple sources)
    TokenExpirationSeconds:  3607
    ConfigMapName:           kube-root-ca.crt
    Optional:                false
    DownwardAPI:             true
QoS Class:                   BestEffort
Node-Selectors:              kubernetes.io/os=linux
Tolerations:                 CriticalAddonsOnly op=Exists
                             node-role.kubernetes.io/control-plane:NoSchedule
                             node-role.kubernetes.io/master:NoSchedule
                             node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
                             node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
Events:                      <none>

Steps to reproduce the issue

1. Download the 'kubeasz' code, the binaries, and the offline images

export release=3.6.6
curl -C- -fLO --retry 3 https://github.com/easzlab/kubeasz/releases/download/${release}/ezdown
chmod +x ./ezdown
./ezdown -D -m standard

  2. Install an all-in-one cluster

./ezdown -S
source ~/.bashrc
dk ezctl start-aio

Describe the results you received and expected

Received: repeating "TaskExit event in podsandbox handler" info-level log entries (shown above) for containers that are running and healthy. Expected: no 'TaskExit event' logs for a healthy pod.

What version of containerd are you using?

containerd github.com/containerd/containerd/v2 v2.0.4 1a43cb6

Any other relevant information

runc version 1.2.6
commit: v1.2.6-0-ge89a2992
spec: 1.2.0
go: go1.23.7
libseccomp: 2.5.5
uname -a
Linux k8s-192-168-0-19 5.4.0-122-generic #138-Ubuntu SMP Wed Jun 22 15:00:31 UTC 2022 x86_64 x86_64 x86_64 GNU/Linux
crictl info
{
  "cniconfig": {
    "Networks": [
      {
        "Config": {
          "CNIVersion": "0.3.1",
          "Name": "cni-loopback",
          "Plugins": [
            {
              "Network": {
                "dns": {},
                "ipam": {},
                "type": "loopback"
              },
              "Source": "{\"type\":\"loopback\"}"
            }
          ],
          "Source": "{\n\"cniVersion\": \"0.3.1\",\n\"name\": \"cni-loopback\",\n\"plugins\": [{\n  \"type\": \"loopback\"\n}]\n}"
        },
        "IFName": "lo"
      },
      {
        "Config": {
          "CNIVersion": "0.3.1",
          "Name": "k8s-pod-network",
          "Plugins": [
            {
              "Network": {
                "dns": {},
                "ipam": {
                  "type": "calico-ipam"
                },
                "type": "calico"
              },
              "Source": "{\"etcd_ca_cert_file\":\"/etc/kubernetes/ssl/ca.pem\",\"etcd_cert_file\":\"/etc/calico/ssl/calico.pem\",\"etcd_endpoints\":\"https://192.168.0.19:2379\",\"etcd_key_file\":\"/etc/calico/ssl/calico-key.pem\",\"ipam\":{\"type\":\"calico-ipam\"},\"kubernetes\":{\"kubeconfig\":\"/etc/cni/net.d/calico-kubeconfig\"},\"log_file_path\":\"/var/log/calico/cni/cni.log\",\"log_level\":\"info\",\"mtu\":1500,\"policy\":{\"type\":\"k8s\"},\"type\":\"calico\"}"
            },
            {
              "Network": {
                "capabilities": {
                  "portMappings": true
                },
                "dns": {},
                "ipam": {},
                "type": "portmap"
              },
              "Source": "{\"capabilities\":{\"portMappings\":true},\"snat\":true,\"type\":\"portmap\"}"
            },
            {
              "Network": {
                "capabilities": {
                  "bandwidth": true
                },
                "dns": {},
                "ipam": {},
                "type": "bandwidth"
              },
              "Source": "{\"capabilities\":{\"bandwidth\":true},\"type\":\"bandwidth\"}"
            }
          ],
          "Source": "{\n  \"name\": \"k8s-pod-network\",\n  \"cniVersion\": \"0.3.1\",\n  \"plugins\": [\n    {\n      \"type\": \"calico\",\n      \"log_level\": \"info\",\n      \"log_file_path\": \"/var/log/calico/cni/cni.log\",\n      \"etcd_endpoints\": \"https://192.168.0.19:2379\",\n      \"etcd_key_file\": \"/etc/calico/ssl/calico-key.pem\",\n      \"etcd_cert_file\": \"/etc/calico/ssl/calico.pem\",\n      \"etcd_ca_cert_file\": \"/etc/kubernetes/ssl/ca.pem\",\n      \"mtu\": 1500,\n      \"ipam\": {\n          \"type\": \"calico-ipam\"\n      },\n      \"policy\": {\n          \"type\": \"k8s\"\n      },\n      \"kubernetes\": {\n          \"kubeconfig\": \"/etc/cni/net.d/calico-kubeconfig\"\n      }\n    },\n    {\n      \"type\": \"portmap\",\n      \"snat\": true,\n      \"capabilities\": {\"portMappings\": true}\n    },\n    {\n      \"type\": \"bandwidth\",\n      \"capabilities\": {\"bandwidth\": true}\n    }\n  ]\n}"
        },
        "IFName": "eth0"
      }
    ],
    "PluginConfDir": "/etc/cni/net.d",
    "PluginDirs": [
      "/opt/cni/bin"
    ],
    "PluginMaxConfNum": 1,
    "Prefix": "eth"
  },
  "config": {
    "cdiSpecDirs": [
      "/etc/cdi",
      "/var/run/cdi"
    ],
    "cni": {
      "binDir": "/opt/cni/bin",
      "confDir": "/etc/cni/net.d",
      "confTemplate": "/etc/cni/net.d/10-default.conf",
      "ipPref": "",
      "maxConfNum": 1,
      "setupSerially": false,
      "useInternalLoopback": false
    },
    "containerd": {
      "defaultRuntimeName": "runc",
      "ignoreBlockIONotEnabledErrors": false,
      "ignoreRdtNotEnabledErrors": false,
      "runtimes": {
        "runc": {
          "ContainerAnnotations": [],
          "PodAnnotations": [],
          "baseRuntimeSpec": "",
          "cniConfDir": "",
          "cniMaxConfNum": 0,
          "io_type": "",
          "options": {
            "BinaryName": "",
            "CriuImagePath": "",
            "CriuWorkPath": "",
            "IoGid": 0,
            "IoUid": 0,
            "NoNewKeyring": false,
            "Root": "",
            "ShimCgroup": "",
            "SystemdCgroup": true
          },
          "privileged_without_host_devices": false,
          "privileged_without_host_devices_all_devices_allowed": false,
          "runtimePath": "",
          "runtimeType": "io.containerd.runc.v2",
          "sandboxer": "podsandbox",
          "snapshotter": ""
        }
      }
    },
    "containerdEndpoint": "/run/containerd/containerd.sock",
    "containerdRootDir": "/var/lib/containerd",
    "device_ownership_from_security_context": false,
    "disableApparmor": false,
    "disableHugetlbController": true,
    "disableProcMount": false,
    "drainExecSyncIOTimeout": "0s",
    "enableCDI": true,
    "enableSelinux": false,
    "enableUnprivilegedICMP": true,
    "enableUnprivilegedPorts": true,
    "ignoreDeprecationWarnings": [],
    "ignoreImageDefinedVolumes": false,
    "maxContainerLogSize": 16384,
    "netnsMountsUnderStateDir": false,
    "restrictOOMScoreAdj": false,
    "rootDir": "/var/lib/containerd/io.containerd.grpc.v1.cri",
    "selinuxCategoryRange": 1024,
    "stateDir": "/run/containerd/io.containerd.grpc.v1.cri",
    "tolerateMissingHugetlbController": true,
    "unsetSeccompProfile": ""
  },
  "golang": "go1.23.7",
  "lastCNILoadStatus": "OK",
  "lastCNILoadStatus.default": "OK",
  "runtimeHandlers": [
    {
      "features": {
        "user_namespaces": true
      },
      "name": "runc"
    },
    {
      "features": {
        "user_namespaces": true
      }
    }
  ],
  "status": {
    "conditions": [
      {
        "message": "",
        "reason": "",
        "status": true,
        "type": "RuntimeReady"
      },
      {
        "message": "",
        "reason": "",
        "status": true,
        "type": "NetworkReady"
      },
      {
        "message": "{\"io.containerd.deprecation/cri-registry-configs\":\"The `configs` property of `[plugins.\\\"io.containerd.grpc.v1.cri\\\".registry]` is deprecated since containerd v1.5 and will be removed in containerd v2.1.Use `config_path` instead.\",\"io.containerd.deprecation/cri-registry-mirrors\":\"The `mirrors` property of `[plugins.\\\"io.containerd.grpc.v1.cri\\\".registry]` is deprecated since containerd v1.5 and will be removed in containerd v2.1.Use `config_path` instead.\"}",
        "reason": "ContainerdHasDeprecationWarnings",
        "status": false,
        "type": "ContainerdHasNoDeprecationWarnings"
      }
    ]
  }
}

Show configuration if it is related to CRI plugin.

No response

Metadata

Metadata

Assignees

No one assigned

    Type

    No type

    Projects

    Status

    Todo

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions