Kubeintel Logo

Kubeintel

  • Search
  • Pods
  • Deployments
  • Statefulsets
  • Jobs
  • Daemonsets
  • Namespaces
  • Nodes
  • Services
  • Configmaps
  1. Home
  2. /
  3. namespaces
  4. /
  5. kube-system
  6. /
  7. pods
  8. /
  9. cilium-64w6x
  10. /
  11. logs
Summary
Metadata
Containers
Spec
Status
All
Events
Logs
Investigator
Pod Details

Name: cilium-64w6x

Namespace: kube-system

Status: Running

IP: 10.108.0.3

Node: system-0-65529

Ready: 1/1

Kubectl Commands
  • View
  • Delete
  • Describe
  • Debug
Containers
Name
Image
Ready
Restarts
...
cilium-agent | ghcr.io/digitalocean-packages/cilium:v1.... | Ready | -
  • 1
Init Containers
Name
Image
Ready
Restarts
...
delay-cilium-for-ccm | ghcr.io/digitalocean-packages/cilium:v1.... | Completed | -
config | ghcr.io/digitalocean-packages/cilium:v1.... | Completed | -
mount-cgroup | ghcr.io/digitalocean-packages/cilium:v1.... | Completed | -
apply-sysctl-overwrites | ghcr.io/digitalocean-packages/cilium:v1.... | Completed | -
mount-bpf-fs | ghcr.io/digitalocean-packages/cilium:v1.... | Completed | -
  • 1
  • 2
Metadata

Creation Time: 2025-04-17T22:03:56Z

Labels:

  • app.kubernetes.io/name: cilium-agent...
  • app.kubernetes.io/part-of: cilium...
  • controller-revision-hash: 79f45cdb77...
  • doks.digitalocean.com/managed: true...
  • k8s-app: cilium
  • kubernetes.io/cluster-service: true...
  • pod-template-generation: 6...

Annotations:

  • clusterlint.digitalocean.com/disabled-checks: privileged-container...
  • container.apparmor.security.beta.kubernetes.io/apply-sysctl-overwrites: unconfined...
  • container.apparmor.security.beta.kubernetes.io/cilium-agent: unconfined...
  • container.apparmor.security.beta.kubernetes.io/clean-cilium-state: unconfined...
  • container.apparmor.security.beta.kubernetes.io/mount-cgroup: unconfined...
  • kubectl.kubernetes.io/default-container: cilium-agent...
  • prometheus.io/port: 9090...
  • prometheus.io/scrape: true...
name: cilium-64w6x
generateName: cilium-
namespace: kube-system
uid: 1a08f218-0590-4d2b-b404-b8c446acf5be
resourceVersion: '92139188'
creationTimestamp: '2025-04-17T22:03:56Z'
labels:
app.kubernetes.io/name: cilium-agent
app.kubernetes.io/part-of: cilium
controller-revision-hash: 79f45cdb77
doks.digitalocean.com/managed: 'true'
k8s-app: cilium
kubernetes.io/cluster-service: 'true'
pod-template-generation: '6'
annotations:
clusterlint.digitalocean.com/disabled-checks: privileged-containers,non-root-user,resource-requirements,hostpath-volume
container.apparmor.security.beta.kubernetes.io/apply-sysctl-overwrites: unconfined
container.apparmor.security.beta.kubernetes.io/cilium-agent: unconfined
container.apparmor.security.beta.kubernetes.io/clean-cilium-state: unconfined
container.apparmor.security.beta.kubernetes.io/mount-cgroup: unconfined
kubectl.kubernetes.io/default-container: cilium-agent
prometheus.io/port: '9090'
prometheus.io/scrape: 'true'
ownerReferences:
- apiVersion: apps/v1
kind: DaemonSet
name: cilium
uid: f644a837-ae29-48a0-89c7-2d886e50903e
controller: true
blockOwnerDeletion: true
- name: cilium-agent
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- cilium-agent
args:
- '--config-dir=/tmp/cilium/config-map'
- >-
--k8s-api-server=https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- '--ipv4-native-routing-cidr=10.244.0.0/16'
ports:
- name: peer-service
hostPort: 4244
containerPort: 4244
protocol: TCP
- name: prometheus
hostPort: 9090
containerPort: 9090
protocol: TCP
env:
- name: K8S_NODE_NAME
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: spec.nodeName
- name: CILIUM_K8S_NAMESPACE
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: metadata.namespace
- name: CILIUM_CLUSTERMESH_CONFIG
value: /var/lib/cilium/clustermesh/
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources:
requests:
cpu: 300m
memory: 300Mi
volumeMounts:
- name: host-proc-sys-net
mountPath: /host/proc/sys/net
- name: host-proc-sys-kernel
mountPath: /host/proc/sys/kernel
- name: bpf-maps
mountPath: /sys/fs/bpf
mountPropagation: HostToContainer
- name: cilium-run
mountPath: /var/run/cilium
- name: etc-cni-netd
mountPath: /host/etc/cni/net.d
- name: clustermesh-secrets
readOnly: true
mountPath: /var/lib/cilium/clustermesh
- name: lib-modules
readOnly: true
mountPath: /lib/modules
- name: xtables-lock
mountPath: /run/xtables.lock
- name: hubble-tls
readOnly: true
mountPath: /var/lib/cilium/tls/hubble
- name: tmp
mountPath: /tmp
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
livenessProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
initialDelaySeconds: 120
timeoutSeconds: 5
periodSeconds: 30
successThreshold: 1
failureThreshold: 10
readinessProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
timeoutSeconds: 5
periodSeconds: 30
successThreshold: 1
failureThreshold: 3
startupProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
timeoutSeconds: 1
periodSeconds: 2
successThreshold: 1
failureThreshold: 105
lifecycle:
postStart:
exec:
command:
- bash
- '-c'
- >
set -o errexit
set -o pipefail
set -o nounset
# When running in AWS ENI mode, it's likely that 'aws-node' has
# had a chance to install SNAT iptables rules. These can result
# in dropped traffic, so we should attempt to remove them.
# We do it using a 'postStart' hook since this may need to run
# for nodes which might have already been init'ed but may still
# have dangling rules. This is safe because there are no
# dependencies on anything that is part of the startup script
# itself, and can be safely run multiple times per node (e.g. in
# case of a restart).
if [[ "$(iptables-save | grep -E -c
'AWS-SNAT-CHAIN|AWS-CONNMARK-CHAIN')" != "0" ]];
then
echo 'Deleting iptables rules created by the AWS CNI VPC plugin'
iptables-save | grep -E -v 'AWS-SNAT-CHAIN|AWS-CONNMARK-CHAIN' | iptables-restore
fi
echo 'Done!'
preStop:
exec:
command:
- /cni-uninstall.sh
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- CHOWN
- KILL
- NET_ADMIN
- NET_RAW
- IPC_LOCK
- SYS_MODULE
- SYS_ADMIN
- SYS_RESOURCE
- DAC_OVERRIDE
- FOWNER
- SETGID
- SETUID
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
volumes:
- name: host-kubectl
hostPath:
path: /usr/bin/kubectl
type: File
- name: tmp
emptyDir: {}
- name: cilium-run
hostPath:
path: /var/run/cilium
type: DirectoryOrCreate
- name: bpf-maps
hostPath:
path: /sys/fs/bpf
type: DirectoryOrCreate
- name: hostproc
hostPath:
path: /proc
type: Directory
- name: cilium-cgroup
hostPath:
path: /run/cilium/cgroupv2
type: DirectoryOrCreate
- name: cni-path
hostPath:
path: /opt/cni/bin
type: DirectoryOrCreate
- name: etc-cni-netd
hostPath:
path: /etc/cni/net.d
type: DirectoryOrCreate
- name: lib-modules
hostPath:
path: /lib/modules
type: ''
- name: xtables-lock
hostPath:
path: /run/xtables.lock
type: FileOrCreate
- name: clustermesh-secrets
projected:
sources:
- secret:
name: cilium-clustermesh
optional: true
- secret:
name: clustermesh-apiserver-remote-cert
items:
- key: tls.key
path: common-etcd-client.key
- key: tls.crt
path: common-etcd-client.crt
- key: ca.crt
path: common-etcd-client-ca.crt
optional: true
defaultMode: 256
- name: host-proc-sys-net
hostPath:
path: /proc/sys/net
type: Directory
- name: host-proc-sys-kernel
hostPath:
path: /proc/sys/kernel
type: Directory
- name: hubble-tls
projected:
sources:
- secret:
name: hubble-server-certs
items:
- key: tls.crt
path: server.crt
- key: tls.key
path: server.key
- key: ca.crt
path: client-ca.crt
optional: true
defaultMode: 256
- name: kube-api-access-cknjj
projected:
sources:
- serviceAccountToken:
expirationSeconds: 3607
path: token
- configMap:
name: kube-root-ca.crt
items:
- key: ca.crt
path: ca.crt
- downwardAPI:
items:
- path: namespace
fieldRef:
apiVersion: v1
fieldPath: metadata.namespace
defaultMode: 420
initContainers:
- name: delay-cilium-for-ccm
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- bash
- '-e'
- '-c'
- >
# This will get the node object for the local node and search through
# the assigned addresses in the object in order to check whether CCM
# already set the internal AND external IP since cilium needs both
# for a clean startup.
# The grep matches regardless of the order of IPs.
until /host/usr/bin/kubectl get node ${HOSTNAME} -o
jsonpath="{.status.addresses[*].type}" | grep -E
"InternalIP.*ExternalIP|ExternalIP.*InternalIP"; do echo "waiting for
CCM to store internal and external IP addresses in node object:
${HOSTNAME}" && sleep 3; done;
env:
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources:
requests:
cpu: 100m
memory: 100Mi
volumeMounts:
- name: host-kubectl
mountPath: /host/usr/bin/kubectl
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: File
imagePullPolicy: IfNotPresent
- name: config
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- cilium
- build-config
- '--source=config-map:cilium-config'
env:
- name: K8S_NODE_NAME
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: spec.nodeName
- name: CILIUM_K8S_NAMESPACE
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: metadata.namespace
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources: {}
volumeMounts:
- name: tmp
mountPath: /tmp
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
- name: mount-cgroup
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- sh
- '-ec'
- >
cp /usr/bin/cilium-mount /hostbin/cilium-mount;
nsenter --cgroup=/hostproc/1/ns/cgroup --mount=/hostproc/1/ns/mnt
"${BIN_PATH}/cilium-mount" $CGROUP_ROOT;
rm /hostbin/cilium-mount
env:
- name: CGROUP_ROOT
value: /run/cilium/cgroupv2
- name: BIN_PATH
value: /opt/cni/bin
resources: {}
volumeMounts:
- name: hostproc
mountPath: /hostproc
- name: cni-path
mountPath: /hostbin
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- SYS_ADMIN
- SYS_CHROOT
- SYS_PTRACE
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
- name: apply-sysctl-overwrites
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- sh
- '-ec'
- |
cp /usr/bin/cilium-sysctlfix /hostbin/cilium-sysctlfix;
nsenter --mount=/hostproc/1/ns/mnt "${BIN_PATH}/cilium-sysctlfix";
rm /hostbin/cilium-sysctlfix
env:
- name: BIN_PATH
value: /opt/cni/bin
resources: {}
volumeMounts:
- name: hostproc
mountPath: /hostproc
- name: cni-path
mountPath: /hostbin
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- SYS_ADMIN
- SYS_CHROOT
- SYS_PTRACE
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
- name: mount-bpf-fs
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- /bin/bash
- '-c'
- '--'
args:
- mount | grep "/sys/fs/bpf type bpf" || mount -t bpf bpf /sys/fs/bpf
resources: {}
volumeMounts:
- name: bpf-maps
mountPath: /sys/fs/bpf
mountPropagation: Bidirectional
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
privileged: true
- name: clean-cilium-state
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- /init-container.sh
env:
- name: CILIUM_ALL_STATE
valueFrom:
configMapKeyRef:
name: cilium-config
key: clean-cilium-state
optional: true
- name: CILIUM_BPF_STATE
valueFrom:
configMapKeyRef:
name: cilium-config
key: clean-cilium-bpf-state
optional: true
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources: {}
volumeMounts:
- name: bpf-maps
mountPath: /sys/fs/bpf
- name: cilium-cgroup
mountPath: /run/cilium/cgroupv2
mountPropagation: HostToContainer
- name: cilium-run
mountPath: /var/run/cilium
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- NET_ADMIN
- SYS_MODULE
- SYS_ADMIN
- SYS_RESOURCE
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
- name: install-cni-binaries
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- /install-plugin.sh
resources:
requests:
cpu: 100m
memory: 10Mi
volumeMounts:
- name: cni-path
mountPath: /host/opt/cni/bin
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
containers:
- name: cilium-agent
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- cilium-agent
args:
- '--config-dir=/tmp/cilium/config-map'
- >-
--k8s-api-server=https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- '--ipv4-native-routing-cidr=10.244.0.0/16'
ports:
- name: peer-service
hostPort: 4244
containerPort: 4244
protocol: TCP
- name: prometheus
hostPort: 9090
containerPort: 9090
protocol: TCP
env:
- name: K8S_NODE_NAME
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: spec.nodeName
- name: CILIUM_K8S_NAMESPACE
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: metadata.namespace
- name: CILIUM_CLUSTERMESH_CONFIG
value: /var/lib/cilium/clustermesh/
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources:
requests:
cpu: 300m
memory: 300Mi
volumeMounts:
- name: host-proc-sys-net
mountPath: /host/proc/sys/net
- name: host-proc-sys-kernel
mountPath: /host/proc/sys/kernel
- name: bpf-maps
mountPath: /sys/fs/bpf
mountPropagation: HostToContainer
- name: cilium-run
mountPath: /var/run/cilium
- name: etc-cni-netd
mountPath: /host/etc/cni/net.d
- name: clustermesh-secrets
readOnly: true
mountPath: /var/lib/cilium/clustermesh
- name: lib-modules
readOnly: true
mountPath: /lib/modules
- name: xtables-lock
mountPath: /run/xtables.lock
- name: hubble-tls
readOnly: true
mountPath: /var/lib/cilium/tls/hubble
- name: tmp
mountPath: /tmp
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
livenessProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
initialDelaySeconds: 120
timeoutSeconds: 5
periodSeconds: 30
successThreshold: 1
failureThreshold: 10
readinessProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
timeoutSeconds: 5
periodSeconds: 30
successThreshold: 1
failureThreshold: 3
startupProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
timeoutSeconds: 1
periodSeconds: 2
successThreshold: 1
failureThreshold: 105
lifecycle:
postStart:
exec:
command:
- bash
- '-c'
- >
set -o errexit
set -o pipefail
set -o nounset
# When running in AWS ENI mode, it's likely that 'aws-node' has
# had a chance to install SNAT iptables rules. These can result
# in dropped traffic, so we should attempt to remove them.
# We do it using a 'postStart' hook since this may need to run
# for nodes which might have already been init'ed but may still
# have dangling rules. This is safe because there are no
# dependencies on anything that is part of the startup script
# itself, and can be safely run multiple times per node (e.g. in
# case of a restart).
if [[ "$(iptables-save | grep -E -c
'AWS-SNAT-CHAIN|AWS-CONNMARK-CHAIN')" != "0" ]];
then
echo 'Deleting iptables rules created by the AWS CNI VPC plugin'
iptables-save | grep -E -v 'AWS-SNAT-CHAIN|AWS-CONNMARK-CHAIN' | iptables-restore
fi
echo 'Done!'
preStop:
exec:
command:
- /cni-uninstall.sh
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- CHOWN
- KILL
- NET_ADMIN
- NET_RAW
- IPC_LOCK
- SYS_MODULE
- SYS_ADMIN
- SYS_RESOURCE
- DAC_OVERRIDE
- FOWNER
- SETGID
- SETUID
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
restartPolicy: Always
terminationGracePeriodSeconds: 1
dnsPolicy: ClusterFirst
nodeSelector:
kubernetes.io/os: linux
serviceAccountName: cilium
serviceAccount: cilium
automountServiceAccountToken: true
nodeName: system-0-65529
hostNetwork: true
securityContext: {}
affinity:
nodeAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
nodeSelectorTerms:
- matchFields:
- key: metadata.name
operator: In
values:
- system-0-65529
podAntiAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
- labelSelector:
matchLabels:
k8s-app: cilium
topologyKey: kubernetes.io/hostname
schedulerName: default-scheduler
tolerations:
- operator: Exists
- key: node.kubernetes.io/not-ready
operator: Exists
effect: NoExecute
- key: node.kubernetes.io/unreachable
operator: Exists
effect: NoExecute
- key: node.kubernetes.io/disk-pressure
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/memory-pressure
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/pid-pressure
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/unschedulable
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/network-unavailable
operator: Exists
effect: NoSchedule
priorityClassName: system-node-critical
priority: 2000001000
enableServiceLinks: true
preemptionPolicy: PreemptLowerPriority
phase: Running
conditions:
- type: PodReadyToStartContainers
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:04:16Z'
- type: Initialized
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:04:22Z'
- type: Ready
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:04:27Z'
- type: ContainersReady
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:04:27Z'
- type: PodScheduled
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:03:56Z'
hostIP: 10.108.0.3
podIP: 10.108.0.3
podIPs:
- ip: 10.108.0.3
startTime: '2025-04-17T22:03:58Z'
initContainerStatuses:
- name: delay-cilium-for-ccm
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:04:16Z'
finishedAt: '2025-04-17T22:04:16Z'
containerID: >-
containerd://a69d2e04a08a02601b45f58bdd69330ffbdd88b50afdc6f21c8763b0646dc70a
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://a69d2e04a08a02601b45f58bdd69330ffbdd88b50afdc6f21c8763b0646dc70a
started: false
- name: config
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:04:17Z'
finishedAt: '2025-04-17T22:04:17Z'
containerID: >-
containerd://e769f122905893ec8273ed3c3d7afa1b891ab9173bfba78df92297a25141bc91
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://e769f122905893ec8273ed3c3d7afa1b891ab9173bfba78df92297a25141bc91
started: false
- name: mount-cgroup
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:04:18Z'
finishedAt: '2025-04-17T22:04:18Z'
containerID: >-
containerd://06360cad998785bc374372480132ec7a1bf1466bef3f02fb128cf538e9308ac0
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://06360cad998785bc374372480132ec7a1bf1466bef3f02fb128cf538e9308ac0
started: false
- name: apply-sysctl-overwrites
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:04:19Z'
finishedAt: '2025-04-17T22:04:19Z'
containerID: >-
containerd://7811b02e0912508ba305ce1c374923766b1896331f8cd67ab386ed45c46e0648
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://7811b02e0912508ba305ce1c374923766b1896331f8cd67ab386ed45c46e0648
started: false
- name: mount-bpf-fs
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:04:20Z'
finishedAt: '2025-04-17T22:04:20Z'
containerID: >-
containerd://c7767d483d4f7e8c0d33102b1009ac27a807f527fc25af427e961644022a6450
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://c7767d483d4f7e8c0d33102b1009ac27a807f527fc25af427e961644022a6450
started: false
- name: clean-cilium-state
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:04:21Z'
finishedAt: '2025-04-17T22:04:21Z'
containerID: >-
containerd://143fb41038543dbd04fd580c8703b45d1a5d4f7f276ae7260df6edcc3f5a72e0
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://143fb41038543dbd04fd580c8703b45d1a5d4f7f276ae7260df6edcc3f5a72e0
started: false
- name: install-cni-binaries
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:04:22Z'
finishedAt: '2025-04-17T22:04:22Z'
containerID: >-
containerd://c90a1c7824e1bd84e55d5f2fd9a7dda3e77397c09f37355fbb9ad92dbc818d2c
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://c90a1c7824e1bd84e55d5f2fd9a7dda3e77397c09f37355fbb9ad92dbc818d2c
started: false
containerStatuses:
- name: cilium-agent
state:
running:
startedAt: '2025-04-17T22:04:23Z'
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://691cfa3f558f9fc549743ce6bede609254190c5b2c741e1de2a709d8e2a479ad
started: true
qosClass: Burstable
metadata:
name: cilium-64w6x
generateName: cilium-
namespace: kube-system
uid: 1a08f218-0590-4d2b-b404-b8c446acf5be
resourceVersion: '92139188'
creationTimestamp: '2025-04-17T22:03:56Z'
labels:
app.kubernetes.io/name: cilium-agent
app.kubernetes.io/part-of: cilium
controller-revision-hash: 79f45cdb77
doks.digitalocean.com/managed: 'true'
k8s-app: cilium
kubernetes.io/cluster-service: 'true'
pod-template-generation: '6'
annotations:
clusterlint.digitalocean.com/disabled-checks: privileged-containers,non-root-user,resource-requirements,hostpath-volume
container.apparmor.security.beta.kubernetes.io/apply-sysctl-overwrites: unconfined
container.apparmor.security.beta.kubernetes.io/cilium-agent: unconfined
container.apparmor.security.beta.kubernetes.io/clean-cilium-state: unconfined
container.apparmor.security.beta.kubernetes.io/mount-cgroup: unconfined
kubectl.kubernetes.io/default-container: cilium-agent
prometheus.io/port: '9090'
prometheus.io/scrape: 'true'
ownerReferences:
- apiVersion: apps/v1
kind: DaemonSet
name: cilium
uid: f644a837-ae29-48a0-89c7-2d886e50903e
controller: true
blockOwnerDeletion: true
spec:
volumes:
- name: host-kubectl
hostPath:
path: /usr/bin/kubectl
type: File
- name: tmp
emptyDir: {}
- name: cilium-run
hostPath:
path: /var/run/cilium
type: DirectoryOrCreate
- name: bpf-maps
hostPath:
path: /sys/fs/bpf
type: DirectoryOrCreate
- name: hostproc
hostPath:
path: /proc
type: Directory
- name: cilium-cgroup
hostPath:
path: /run/cilium/cgroupv2
type: DirectoryOrCreate
- name: cni-path
hostPath:
path: /opt/cni/bin
type: DirectoryOrCreate
- name: etc-cni-netd
hostPath:
path: /etc/cni/net.d
type: DirectoryOrCreate
- name: lib-modules
hostPath:
path: /lib/modules
type: ''
- name: xtables-lock
hostPath:
path: /run/xtables.lock
type: FileOrCreate
- name: clustermesh-secrets
projected:
sources:
- secret:
name: cilium-clustermesh
optional: true
- secret:
name: clustermesh-apiserver-remote-cert
items:
- key: tls.key
path: common-etcd-client.key
- key: tls.crt
path: common-etcd-client.crt
- key: ca.crt
path: common-etcd-client-ca.crt
optional: true
defaultMode: 256
- name: host-proc-sys-net
hostPath:
path: /proc/sys/net
type: Directory
- name: host-proc-sys-kernel
hostPath:
path: /proc/sys/kernel
type: Directory
- name: hubble-tls
projected:
sources:
- secret:
name: hubble-server-certs
items:
- key: tls.crt
path: server.crt
- key: tls.key
path: server.key
- key: ca.crt
path: client-ca.crt
optional: true
defaultMode: 256
- name: kube-api-access-cknjj
projected:
sources:
- serviceAccountToken:
expirationSeconds: 3607
path: token
- configMap:
name: kube-root-ca.crt
items:
- key: ca.crt
path: ca.crt
- downwardAPI:
items:
- path: namespace
fieldRef:
apiVersion: v1
fieldPath: metadata.namespace
defaultMode: 420
initContainers:
- name: delay-cilium-for-ccm
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- bash
- '-e'
- '-c'
- >
# This will get the node object for the local node and search through
# the assigned addresses in the object in order to check whether CCM
# already set the internal AND external IP since cilium needs both
# for a clean startup.
# The grep matches regardless of the order of IPs.
until /host/usr/bin/kubectl get node ${HOSTNAME} -o
jsonpath="{.status.addresses[*].type}" | grep -E
"InternalIP.*ExternalIP|ExternalIP.*InternalIP"; do echo "waiting for
CCM to store internal and external IP addresses in node object:
${HOSTNAME}" && sleep 3; done;
env:
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources:
requests:
cpu: 100m
memory: 100Mi
volumeMounts:
- name: host-kubectl
mountPath: /host/usr/bin/kubectl
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: File
imagePullPolicy: IfNotPresent
- name: config
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- cilium
- build-config
- '--source=config-map:cilium-config'
env:
- name: K8S_NODE_NAME
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: spec.nodeName
- name: CILIUM_K8S_NAMESPACE
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: metadata.namespace
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources: {}
volumeMounts:
- name: tmp
mountPath: /tmp
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
- name: mount-cgroup
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- sh
- '-ec'
- >
cp /usr/bin/cilium-mount /hostbin/cilium-mount;
nsenter --cgroup=/hostproc/1/ns/cgroup --mount=/hostproc/1/ns/mnt
"${BIN_PATH}/cilium-mount" $CGROUP_ROOT;
rm /hostbin/cilium-mount
env:
- name: CGROUP_ROOT
value: /run/cilium/cgroupv2
- name: BIN_PATH
value: /opt/cni/bin
resources: {}
volumeMounts:
- name: hostproc
mountPath: /hostproc
- name: cni-path
mountPath: /hostbin
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- SYS_ADMIN
- SYS_CHROOT
- SYS_PTRACE
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
- name: apply-sysctl-overwrites
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- sh
- '-ec'
- |
cp /usr/bin/cilium-sysctlfix /hostbin/cilium-sysctlfix;
nsenter --mount=/hostproc/1/ns/mnt "${BIN_PATH}/cilium-sysctlfix";
rm /hostbin/cilium-sysctlfix
env:
- name: BIN_PATH
value: /opt/cni/bin
resources: {}
volumeMounts:
- name: hostproc
mountPath: /hostproc
- name: cni-path
mountPath: /hostbin
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- SYS_ADMIN
- SYS_CHROOT
- SYS_PTRACE
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
- name: mount-bpf-fs
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- /bin/bash
- '-c'
- '--'
args:
- mount | grep "/sys/fs/bpf type bpf" || mount -t bpf bpf /sys/fs/bpf
resources: {}
volumeMounts:
- name: bpf-maps
mountPath: /sys/fs/bpf
mountPropagation: Bidirectional
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
privileged: true
- name: clean-cilium-state
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- /init-container.sh
env:
- name: CILIUM_ALL_STATE
valueFrom:
configMapKeyRef:
name: cilium-config
key: clean-cilium-state
optional: true
- name: CILIUM_BPF_STATE
valueFrom:
configMapKeyRef:
name: cilium-config
key: clean-cilium-bpf-state
optional: true
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources: {}
volumeMounts:
- name: bpf-maps
mountPath: /sys/fs/bpf
- name: cilium-cgroup
mountPath: /run/cilium/cgroupv2
mountPropagation: HostToContainer
- name: cilium-run
mountPath: /var/run/cilium
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- NET_ADMIN
- SYS_MODULE
- SYS_ADMIN
- SYS_RESOURCE
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
- name: install-cni-binaries
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- /install-plugin.sh
resources:
requests:
cpu: 100m
memory: 10Mi
volumeMounts:
- name: cni-path
mountPath: /host/opt/cni/bin
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
containers:
- name: cilium-agent
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- cilium-agent
args:
- '--config-dir=/tmp/cilium/config-map'
- >-
--k8s-api-server=https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- '--ipv4-native-routing-cidr=10.244.0.0/16'
ports:
- name: peer-service
hostPort: 4244
containerPort: 4244
protocol: TCP
- name: prometheus
hostPort: 9090
containerPort: 9090
protocol: TCP
env:
- name: K8S_NODE_NAME
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: spec.nodeName
- name: CILIUM_K8S_NAMESPACE
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: metadata.namespace
- name: CILIUM_CLUSTERMESH_CONFIG
value: /var/lib/cilium/clustermesh/
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources:
requests:
cpu: 300m
memory: 300Mi
volumeMounts:
- name: host-proc-sys-net
mountPath: /host/proc/sys/net
- name: host-proc-sys-kernel
mountPath: /host/proc/sys/kernel
- name: bpf-maps
mountPath: /sys/fs/bpf
mountPropagation: HostToContainer
- name: cilium-run
mountPath: /var/run/cilium
- name: etc-cni-netd
mountPath: /host/etc/cni/net.d
- name: clustermesh-secrets
readOnly: true
mountPath: /var/lib/cilium/clustermesh
- name: lib-modules
readOnly: true
mountPath: /lib/modules
- name: xtables-lock
mountPath: /run/xtables.lock
- name: hubble-tls
readOnly: true
mountPath: /var/lib/cilium/tls/hubble
- name: tmp
mountPath: /tmp
- name: kube-api-access-cknjj
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
livenessProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
initialDelaySeconds: 120
timeoutSeconds: 5
periodSeconds: 30
successThreshold: 1
failureThreshold: 10
readinessProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
timeoutSeconds: 5
periodSeconds: 30
successThreshold: 1
failureThreshold: 3
startupProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
timeoutSeconds: 1
periodSeconds: 2
successThreshold: 1
failureThreshold: 105
lifecycle:
postStart:
exec:
command:
- bash
- '-c'
- |
  set -o errexit
  set -o pipefail
  set -o nounset
  # When running in AWS ENI mode, it's likely that 'aws-node' has
  # had a chance to install SNAT iptables rules. These can result
  # in dropped traffic, so we should attempt to remove them.
  # We do it using a 'postStart' hook since this may need to run
  # for nodes which might have already been init'ed but may still
  # have dangling rules. This is safe because there are no
  # dependencies on anything that is part of the startup script
  # itself, and can be safely run multiple times per node (e.g. in
  # case of a restart).
  if [[ "$(iptables-save | grep -E -c 'AWS-SNAT-CHAIN|AWS-CONNMARK-CHAIN')" != "0" ]];
  then
      echo 'Deleting iptables rules created by the AWS CNI VPC plugin'
      iptables-save | grep -E -v 'AWS-SNAT-CHAIN|AWS-CONNMARK-CHAIN' | iptables-restore
  fi
  echo 'Done!'
preStop:
exec:
command:
- /cni-uninstall.sh
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- CHOWN
- KILL
- NET_ADMIN
- NET_RAW
- IPC_LOCK
- SYS_MODULE
- SYS_ADMIN
- SYS_RESOURCE
- DAC_OVERRIDE
- FOWNER
- SETGID
- SETUID
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
restartPolicy: Always
terminationGracePeriodSeconds: 1
dnsPolicy: ClusterFirst
nodeSelector:
kubernetes.io/os: linux
serviceAccountName: cilium
serviceAccount: cilium
automountServiceAccountToken: true
nodeName: system-0-65529
hostNetwork: true
securityContext: {}
affinity:
nodeAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
nodeSelectorTerms:
- matchFields:
- key: metadata.name
operator: In
values:
- system-0-65529
podAntiAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
- labelSelector:
matchLabels:
k8s-app: cilium
topologyKey: kubernetes.io/hostname
schedulerName: default-scheduler
tolerations:
- operator: Exists
- key: node.kubernetes.io/not-ready
operator: Exists
effect: NoExecute
- key: node.kubernetes.io/unreachable
operator: Exists
effect: NoExecute
- key: node.kubernetes.io/disk-pressure
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/memory-pressure
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/pid-pressure
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/unschedulable
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/network-unavailable
operator: Exists
effect: NoSchedule
priorityClassName: system-node-critical
priority: 2000001000
enableServiceLinks: true
preemptionPolicy: PreemptLowerPriority
status:
phase: Running
conditions:
- type: PodReadyToStartContainers
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:04:16Z'
- type: Initialized
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:04:22Z'
- type: Ready
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:04:27Z'
- type: ContainersReady
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:04:27Z'
- type: PodScheduled
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:03:56Z'
hostIP: 10.108.0.3
podIP: 10.108.0.3
podIPs:
- ip: 10.108.0.3
startTime: '2025-04-17T22:03:58Z'
initContainerStatuses:
- name: delay-cilium-for-ccm
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:04:16Z'
finishedAt: '2025-04-17T22:04:16Z'
containerID: >-
containerd://a69d2e04a08a02601b45f58bdd69330ffbdd88b50afdc6f21c8763b0646dc70a
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://a69d2e04a08a02601b45f58bdd69330ffbdd88b50afdc6f21c8763b0646dc70a
started: false
- name: config
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:04:17Z'
finishedAt: '2025-04-17T22:04:17Z'
containerID: >-
containerd://e769f122905893ec8273ed3c3d7afa1b891ab9173bfba78df92297a25141bc91
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://e769f122905893ec8273ed3c3d7afa1b891ab9173bfba78df92297a25141bc91
started: false
- name: mount-cgroup
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:04:18Z'
finishedAt: '2025-04-17T22:04:18Z'
containerID: >-
containerd://06360cad998785bc374372480132ec7a1bf1466bef3f02fb128cf538e9308ac0
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://06360cad998785bc374372480132ec7a1bf1466bef3f02fb128cf538e9308ac0
started: false
- name: apply-sysctl-overwrites
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:04:19Z'
finishedAt: '2025-04-17T22:04:19Z'
containerID: >-
containerd://7811b02e0912508ba305ce1c374923766b1896331f8cd67ab386ed45c46e0648
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://7811b02e0912508ba305ce1c374923766b1896331f8cd67ab386ed45c46e0648
started: false
- name: mount-bpf-fs
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:04:20Z'
finishedAt: '2025-04-17T22:04:20Z'
containerID: >-
containerd://c7767d483d4f7e8c0d33102b1009ac27a807f527fc25af427e961644022a6450
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://c7767d483d4f7e8c0d33102b1009ac27a807f527fc25af427e961644022a6450
started: false
- name: clean-cilium-state
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:04:21Z'
finishedAt: '2025-04-17T22:04:21Z'
containerID: >-
containerd://143fb41038543dbd04fd580c8703b45d1a5d4f7f276ae7260df6edcc3f5a72e0
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://143fb41038543dbd04fd580c8703b45d1a5d4f7f276ae7260df6edcc3f5a72e0
started: false
- name: install-cni-binaries
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:04:22Z'
finishedAt: '2025-04-17T22:04:22Z'
containerID: >-
containerd://c90a1c7824e1bd84e55d5f2fd9a7dda3e77397c09f37355fbb9ad92dbc818d2c
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://c90a1c7824e1bd84e55d5f2fd9a7dda3e77397c09f37355fbb9ad92dbc818d2c
started: false
containerStatuses:
- name: cilium-agent
state:
running:
startedAt: '2025-04-17T22:04:23Z'
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://691cfa3f558f9fc549743ce6bede609254190c5b2c741e1de2a709d8e2a479ad
started: true
qosClass: Burstable
time="2025-04-17T22:04:23Z" level=info msg="Memory available for map entries (0.003% of 4105375744B): 10263439B" subsys=config
time="2025-04-17T22:04:23Z" level=info msg="option bpf-ct-global-tcp-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:04:23Z" level=info msg="option bpf-ct-global-any-max set by dynamic sizing to 65536" subsys=config
time="2025-04-17T22:04:23Z" level=info msg="option bpf-nat-global-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:04:23Z" level=info msg="option bpf-neigh-global-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:04:23Z" level=info msg="option bpf-sock-rev-map-max set by dynamic sizing to 65536" subsys=config
time="2025-04-17T22:04:23Z" level=info msg=" --agent-health-port='9879'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --agent-labels=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --agent-liveness-update-interval='1s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --agent-not-ready-taint-key='node.cilium.io/agent-not-ready'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --allocator-list-timeout='3m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --allow-icmp-frag-needed='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --allow-localhost='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --annotate-k8s-node='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --api-rate-limit=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --arping-refresh-period='30s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --auto-create-cilium-node-resource='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --auto-direct-node-routes='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bgp-announce-lb-ip='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bgp-announce-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bgp-config-path='/var/lib/cilium/bgp/config.yaml'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-auth-map-max='524288'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-global-any-max='262144'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-global-tcp-max='524288'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-regular-any='1m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-regular-tcp='6h0m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-regular-tcp-fin='10s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-regular-tcp-syn='1m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-service-any='1m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-service-tcp='6h0m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-service-tcp-grace='1m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-filter-priority='1'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-fragments-map-max='8192'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-acceleration='disabled'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-affinity-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-algorithm='random'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-dev-ip-addr-inherit=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-dsr-dispatch='opt'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-dsr-l4-xlate='frontend'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-external-clusterip='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-maglev-hash-seed='JLfvgnHc2kaSUFaI'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-maglev-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-maglev-table-size='16381'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-map-max='65536'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-mode='snat'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-proto-diff='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-rev-nat-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-rss-ipv4-src-cidr=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-rss-ipv6-src-cidr=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-service-backend-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-service-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-sock='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-sock-hostns-only='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-source-range-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-map-dynamic-size-ratio='0.0025'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-map-event-buffers=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-nat-global-max='524288'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-neigh-global-max='524288'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-policy-map-full-reconciliation-interval='15m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-policy-map-max='16384'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-root='/sys/fs/bpf'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-sock-rev-map-max='262144'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bypass-ip-availability-upon-restore='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --certificates-directory='/var/run/cilium/certs'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cflags=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cgroup-root='/run/cilium/cgroupv2'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cilium-endpoint-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cluster-health-port='4240'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cluster-id='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cluster-name='default'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cluster-pool-ipv4-mask-size='25'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --clustermesh-config='/var/lib/cilium/clustermesh/'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --clustermesh-ip-identities-sync-timeout='1m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cmdref=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cni-chaining-mode='portmap'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cni-chaining-target=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cni-exclusive='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cni-external-routing='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cni-log-file='/var/run/cilium/cilium-cni.log'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --config=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --config-dir='/tmp/cilium/config-map'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --config-sources='config-map:kube-system/cilium-config'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --conntrack-gc-interval='0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --conntrack-gc-max-interval='0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --container-ip-local-reserved-ports='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --crd-wait-timeout='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --custom-cni-conf='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --datapath-mode='veth'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --debug='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --debug-verbose=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --derive-masquerade-ip-addr-from-device=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --devices=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --direct-routing-device=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --disable-cnp-status-updates='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --disable-endpoint-crd='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --disable-envoy-version-check='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --disable-iptables-feeder-rules=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dns-max-ips-per-restored-rule='1000'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dns-policy-unload-on-shutdown='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-concurrency-limit='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-concurrency-processing-grace-period='0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-enable-transparent-mode='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-insecure-skip-transparent-mode-check='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-lock-count='128'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-lock-timeout='500ms'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-socket-linger-timeout='10'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --egress-gateway-policy-map-max='16384'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --egress-gateway-reconciliation-trigger-interval='1s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --egress-masquerade-interfaces=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --egress-multi-home-ip-rule-compat='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-auto-protect-node-port-range='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bandwidth-manager='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bbr='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bgp-control-plane='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bpf-clock-probe='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bpf-masquerade='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bpf-tproxy='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-cilium-api-server-access='*'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-cilium-endpoint-slice='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-cilium-health-api-server-access='*'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-custom-calls='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-endpoint-health-checking='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-endpoint-routes='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-envoy-config='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-external-ips='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-health-check-nodeport='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-health-checking='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-high-scale-ipcache='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-host-firewall='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-host-legacy-routing='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-host-port='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-hubble='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-hubble-recorder-api='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-icmp-rules='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-identity-mark='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ip-masq-agent='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipsec='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipsec-key-watcher='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipsec-xfrm-state-caching='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv4='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv4-big-tcp='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv4-egress-gateway='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv4-fragment-tracking='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv4-masquerade='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv6='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv6-big-tcp='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv6-masquerade='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv6-ndp='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s-api-discovery='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s-endpoint-slice='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s-event-handover='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s-networkpolicy='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s-terminating-endpoint='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-l2-announcements='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-l2-neigh-discovery='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-l2-pod-announcements='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-l7-proxy='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-local-node-route='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-local-redirect-policy='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-mke='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-monitor='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-nat46x64-gateway='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-node-port='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-pmtu-discovery='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-policy='default'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-recorder='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-remote-node-identity='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-runtime-device-detection='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-sctp='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-service-topology='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-session-affinity='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-srv6='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-stale-cilium-endpoint-cleanup='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-svc-source-range-check='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-tracing='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-unreachable-routes='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-vtep='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-well-known-identities='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-wireguard='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-wireguard-userspace-fallback='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-xdp-prefilter='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-xt-socket-fallback='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --encrypt-interface=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --encrypt-node='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --endpoint-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --endpoint-queue-size='25'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --endpoint-status=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --envoy-config-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --envoy-log=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --exclude-local-address=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --external-envoy-proxy='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --fixed-identity-mapping=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --fqdn-regex-compile-lru-size='1024'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --gops-port='9890'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-403-msg=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-idle-timeout='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-max-grpc-timeout='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-normalize-path='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-request-timeout='3600'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-retry-count='3'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-retry-timeout='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-disable-tls='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-event-buffer-capacity='4095'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-event-queue-size='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-export-file-compress='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-export-file-max-backups='5'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-export-file-max-size-mb='10'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-export-file-path=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-listen-address=':4244'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-metrics=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-metrics-server=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-monitor-events=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-prefer-ipv6='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-recorder-sink-queue-size='1024'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-recorder-storage-path='/var/run/cilium/pcaps'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-skip-unknown-cgroup-ids='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-socket-path='/var/run/cilium/hubble.sock'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-tls-cert-file='/var/lib/cilium/tls/hubble/server.crt'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-tls-client-ca-files='/var/lib/cilium/tls/hubble/client-ca.crt'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-tls-key-file='/var/lib/cilium/tls/hubble/server.key'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --identity-allocation-mode='crd'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --identity-change-grace-period='5s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --identity-gc-interval='5m'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --identity-heartbeat-timeout='15m'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --identity-restore-grace-period='10m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --install-egress-gateway-routes='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --install-iptables-rules='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --install-no-conntrack-iptables-rules='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ip-allocation-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ip-masq-agent-config-path='/etc/config/ip-masq-agent'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipam='cluster-pool'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipam-cilium-node-update-rate='15s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipam-multi-pool-pre-allocation='default=8'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipsec-key-file=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipsec-key-rotation-duration='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --iptables-lock-timeout='5s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --iptables-random-fully='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-native-routing-cidr='10.244.0.0/16'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-node='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-pod-subnets=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-range='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-service-loopback-address='169.254.42.1'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-service-range='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-cluster-alloc-cidr='f00d::/64'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-mcast-device=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-native-routing-cidr=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-node='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-pod-subnets=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-range='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-service-range='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --join-cluster='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-api-server='https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-client-burst='10'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-client-qps='5'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-heartbeat-timeout='30s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-kubeconfig-path=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-namespace='kube-system'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-require-ipv4-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-require-ipv6-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-service-cache-size='128'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-service-proxy-name=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-sync-timeout='3m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-watcher-endpoint-selector='metadata.name!=kube-scheduler,metadata.name!=kube-controller-manager,metadata.name!=etcd-operator,metadata.name!=gcp-controller-manager'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --keep-config='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kube-proxy-replacement='partial'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kube-proxy-replacement-healthz-bind-address=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore-connectivity-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore-lease-ttl='15m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore-max-consecutive-quorum-errors='2'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore-opt=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore-periodic-sync='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --l2-announcements-lease-duration='15s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --l2-announcements-renew-deadline='5s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --l2-announcements-retry-period='2s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --l2-pod-announcements-interface=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --label-prefix-file=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --labels=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --lib-dir='/var/lib/cilium'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --local-max-addr-scope='252'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --local-router-ipv4=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --local-router-ipv6=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --log-driver=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --log-opt=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --log-system-load='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --max-controller-interval='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-enabled='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-mutual-listener-port='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-queue-size='1024'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-rotated-identities-queue-size='1024'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-signal-backoff-duration='1s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-spiffe-trust-domain='spiffe.cilium'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-spire-admin-socket=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --metrics='+cilium_bpf_map_pressure'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mke-cgroup-mount=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --monitor-aggregation='medium'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --monitor-aggregation-flags='all'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --monitor-aggregation-interval='5s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --monitor-queue-size='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mtu='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-encryption-opt-out-labels='node-role.kubernetes.io/control-plane'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-port-acceleration='disabled'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-port-algorithm='random'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-port-bind-protection='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-port-mode='snat'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-port-range='30000,32767'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --nodes-gc-interval='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --operator-api-serve-addr='127.0.0.1:9234'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --policy-audit-mode='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --policy-queue-size='100'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --policy-trigger-interval='1s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --pprof='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --pprof-address='localhost'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --pprof-port='6060'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --preallocate-bpf-maps='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --prepend-iptables-chains='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --procfs='/host/proc'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --prometheus-serve-addr=':9090'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-connect-timeout='2'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-gid='1337'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-idle-timeout-seconds='60'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-max-connection-duration-seconds='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-max-requests-per-connection='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-prometheus-port='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-xff-num-trusted-hops-egress='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-xff-num-trusted-hops-ingress='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --read-cni-conf=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --remove-cilium-node-taints='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --restore='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --restored-proxy-ports-age-limit='15'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --route-metric='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --routing-mode='native'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --set-cilium-is-up-condition='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --set-cilium-node-taints='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --sidecar-istio-proxy-image='cilium/istio_proxy'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --single-cluster-route='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --skip-cnp-status-startup-clean='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --socket-path='/var/run/cilium/cilium.sock'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --srv6-encap-mode='reduced'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --state-dir='/var/run/cilium'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --synchronize-k8s-nodes='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-dns-reject-response-code='refused'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-enable-dns-compression='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-endpoint-max-ip-per-hostname='50'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-idle-connection-grace-period='0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-max-deferred-connection-deletes='10000'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-min-ttl='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-pre-cache=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-proxy-port='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-proxy-response-max-delay='100ms'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --trace-payloadlen='128'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --trace-sock='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tunnel=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tunnel-port='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tunnel-protocol='vxlan'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --unmanaged-pod-watcher-interval='15'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --use-cilium-internal-ip-for-ipsec='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --version='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --vlan-bpf-bypass=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --vtep-cidr=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --vtep-endpoint=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --vtep-mac=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --vtep-mask=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --wireguard-encapsulate='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --write-cni-conf-when-ready='/host/etc/cni/net.d/05-cilium.conflist'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" _ _ _" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" ___|_| |_|_ _ _____" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg="| _| | | | | | |" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg="|___|_|_|_|___|_|_|_|" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg="Cilium 1.14.18 5418622a22 2024-07-03T11:57:56+02:00 go version go1.22.10 linux/amd64" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg="clang (10.0.0) and kernel (6.1.0) versions: OK!" subsys=linux-datapath
time="2025-04-17T22:04:23Z" level=info msg="linking environment: OK!" subsys=linux-datapath
time="2025-04-17T22:04:23Z" level=info msg="Kernel config file not found: if the agent fails to start, check the system requirements at https://docs.cilium.io/en/stable/operations/system_requirements" subsys=probes
time="2025-04-17T22:04:23Z" level=info msg="Detected mounted BPF filesystem at /sys/fs/bpf" subsys=bpf
time="2025-04-17T22:04:23Z" level=info msg="Mounted cgroupv2 filesystem at /run/cilium/cgroupv2" subsys=cgroups
time="2025-04-17T22:04:23Z" level=info msg="Parsing base label prefixes from default label list" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg="Parsing additional label prefixes from user inputs: []" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg="Final label prefixes to be used for identity evaluation:" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - reserved:.*" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - :io\\.kubernetes\\.pod\\.namespace" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - :io\\.cilium\\.k8s\\.namespace\\.labels" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - :app\\.kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:io\\.kubernetes" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:.*beta\\.kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:k8s\\.io" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:pod-template-generation" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:pod-template-hash" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:controller-revision-hash" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:annotation.*" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:etcd_node" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration=1.427766ms function="pprof.init.func1 (cell.go:50)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="79.663µs" function="gops.registerGopsHooks (cell.go:38)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration=3.316235ms function="metrics.init.func1 (cell.go:11)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="6.643µs" function="metrics.init.func2 (cell.go:14)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Spire Delegate API Client is disabled as no socket path is configured" subsys=spire-delegate
time="2025-04-17T22:04:23Z" level=info msg="Mutual authentication handler is disabled as no port is configured" subsys=auth
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration=102.885505ms function="cmd.init.func3 (daemon_main.go:1638)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="45.592µs" function="bgpv1.init.func1 (cell.go:46)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="225.691µs" function="metrics.RegisterCollector (metrics.go:56)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="17.743µs" function="gc.registerSignalHandler (cell.go:47)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="22.921µs" function="utime.initUtimeSync (cell.go:29)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="179.62µs" function="agentliveness.newAgentLivenessUpdater (agent_liveness.go:43)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="81.921µs" function="l2responder.NewL2ResponderReconciler (l2responder.go:63)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="96.452µs" function="garp.newGARPProcessor (processor.go:27)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Starting subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Started gops server" address="127.0.0.1:9890" subsys=gops
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="580.162µs" function="gops.registerGopsHooks.func1 (cell.go:43)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="2.241µs" function="metrics.NewRegistry.func1 (registry.go:86)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Establishing connection to apiserver" host="https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com" subsys=k8s-client
time="2025-04-17T22:04:23Z" level=info msg="Serving prometheus metrics on :9090" subsys=metrics
time="2025-04-17T22:04:23Z" level=info msg="Connected to apiserver" subsys=k8s-client
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration=14.572699ms function="client.(*compositeClientset).onStart" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration=3.989335ms function="authmap.newAuthMap.func1 (cell.go:27)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="95.315µs" function="configmap.newMap.func1 (cell.go:23)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="64.329µs" function="signalmap.newMap.func1 (cell.go:44)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="154.459µs" function="nodemap.newNodeMap.func1 (cell.go:23)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="141.941µs" function="eventsmap.newEventsMap.func1 (cell.go:35)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="77.192µs" function="*cni.cniConfigManager.Start" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Generating CNI configuration file with mode portmap" subsys=cni-config
time="2025-04-17T22:04:23Z" level=info msg="Wrote CNI configuration file to /host/etc/cni/net.d/05-cilium.conflist" subsys=cni-config
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration=20.80947ms function="datapath.newDatapath.func1 (cells.go:113)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Restored 0 node IDs from the BPF map" subsys=linux-datapath
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="116.525µs" function="datapath.newDatapath.func2 (cells.go:126)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="25.986µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Node].Start" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="1.406µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumNode].Start" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Using autogenerated IPv4 allocation range" subsys=node v4Prefix=10.24.0.0/16
time="2025-04-17T22:04:23Z" level=info msg="no local ciliumnode found, will not restore cilium internal ips from k8s" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration=103.976855ms function="node.NewLocalNodeStore.func1 (local_node_store.go:76)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="10.944µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Service].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration=201.077354ms function="*manager.diffStore[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Service].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="5.827µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s.Endpoints].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Using discoveryv1.EndpointSlice" subsys=k8s
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration=100.341912ms function="*manager.diffStore[*github.com/cilium/cilium/pkg/k8s.Endpoints].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="7.664µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Pod].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="1.251µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Namespace].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="1.029µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumNetworkPolicy].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="1.374µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumClusterwideNetworkPolicy].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="2.234µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2alpha1.CiliumCIDRGroup].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="26.98µs" function="endpointmanager.newDefaultEndpointManager.func1 (cell.go:201)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="11.042µs" function="cmd.newPolicyTrifecta.func1 (policy.go:135)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="108.577µs" function="*manager.manager.Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Serving cilium node monitor v1.2 API at unix:///var/run/cilium/monitor1_2.sock" subsys=monitor-agent
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="277.892µs" function="agent.newMonitorAgent.func1 (cell.go:61)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="2.176µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2alpha1.CiliumL2AnnouncementPolicy].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="6.635µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Restoring proxy ports from file failed, falling back to restoring from iptables rules" error="stat /var/run/cilium/state/proxy_ports_state.json: no such file or directory" file-path=/var/run/cilium/state/proxy_ports_state.json subsys=proxy
time="2025-04-17T22:04:24Z" level=info msg="Envoy: Starting xDS gRPC server listening on /var/run/cilium/envoy/sockets/xds.sock" subsys=envoy-manager
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration=2.175944ms function="proxy.newProxy.func1 (cell.go:63)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="186.337µs" function="signal.provideSignalManager.func1 (cell.go:25)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Datapath signal listener running" subsys=signal
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration=1.242006ms function="auth.registerAuthManager.func1 (cell.go:109)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="27.245µs" function="auth.registerGCJobs.func1 (cell.go:158)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="10.722µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Using Managed Neighbor Kernel support" subsys=daemon
time="2025-04-17T22:04:24Z" level=warning msg="Deprecated value for --kube-proxy-replacement: partial (use either \"true\", or \"false\")" subsys=daemon
time="2025-04-17T22:04:24Z" level=info msg="Inheriting MTU from external network interface" device=eth1 ipAddr=10.108.0.3 mtu=1500 subsys=mtu
time="2025-04-17T22:04:24Z" level=info msg="Local boot ID is \"80a520c6-e29e-4fba-b83f-e22eb5be2068\"" subsys=node
time="2025-04-17T22:04:24Z" level=info msg="Removed map pin at /sys/fs/bpf/tc/globals/cilium_ipcache, recreating and re-pinning map cilium_ipcache" file-path=/sys/fs/bpf/tc/globals/cilium_ipcache name=cilium_ipcache subsys=bpf
time="2025-04-17T22:04:24Z" level=info msg="Restored services from maps" failedServices=0 restoredServices=0 subsys=service
time="2025-04-17T22:04:24Z" level=info msg="Restored backends from maps" failedBackends=0 restoredBackends=0 skippedBackends=0 subsys=service
time="2025-04-17T22:04:24Z" level=info msg="Reading old endpoints..." subsys=daemon
time="2025-04-17T22:04:24Z" level=info msg="No old endpoints found." subsys=daemon
time="2025-04-17T22:04:24Z" level=info msg="Waiting until all Cilium CRDs are available" subsys=k8s
time="2025-04-17T22:04:25Z" level=info msg="All Cilium CRDs have been found and are available" subsys=k8s
time="2025-04-17T22:04:25Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-65529 subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-65529 subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=warning msg="Unable to get node resource" error="ciliumnodes.cilium.io \"system-0-65529\" not found" subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=warning msg="Unable to get node resource" error="ciliumnodes.cilium.io \"system-0-65529\" not found" subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=info msg="Successfully created CiliumNode resource" subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=warning msg="Unable to create CiliumNode resource, will retry" error="ciliumnodes.cilium.io \"system-0-65529\" already exists" subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=info msg="Retrieved node information from cilium node" nodeName=system-0-65529 subsys=k8s
time="2025-04-17T22:04:25Z" level=info msg="Received own node information from API server" ipAddr.ipv4=10.108.0.3 ipAddr.ipv6="<nil>" k8sNodeIP=10.108.0.3 labels="map[beta.kubernetes.io/arch:amd64 beta.kubernetes.io/instance-type:s-2vcpu-4gb beta.kubernetes.io/os:linux doks.digitalocean.com/managed:true doks.digitalocean.com/node-id:b67bdbe9-bd78-4fe6-b6cf-afe9bb225ad0 doks.digitalocean.com/node-pool:system-0 doks.digitalocean.com/node-pool-id:73347348-171d-4091-a3c9-1d4b0945c964 doks.digitalocean.com/version:1.30.10-do.0 failure-domain.beta.kubernetes.io/region:nyc3 kubernetes.io/arch:amd64 kubernetes.io/hostname:system-0-65529 kubernetes.io/os:linux node.kubernetes.io/instance-type:s-2vcpu-4gb region:nyc3 topology.kubernetes.io/region:nyc3]" nodeName=system-0-65529 subsys=k8s v4Prefix=10.244.1.0/25 v6Prefix="<nil>"
time="2025-04-17T22:04:25Z" level=info msg="k8s mode: Allowing localhost to reach local endpoints" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Detected devices" devices="[]" subsys=linux-datapath
time="2025-04-17T22:04:25Z" level=info msg="Enabling k8s event listener" subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Removing stale endpoint interfaces" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Waiting until local node addressing before starting watchers depending on it" subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Skipping kvstore configuration" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Initializing node addressing" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Initializing cluster-pool IPAM" subsys=ipam v4Prefix=10.244.1.0/25 v6Prefix="<nil>"
time="2025-04-17T22:04:25Z" level=info msg="Restoring endpoints..." subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Endpoints restored" failed=0 restored=0 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Addressing information:" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Cluster-Name: default" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Cluster-ID: 0" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Local node-name: system-0-65529" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Node-IPv6: <nil>" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" External-Node IPv4: 10.108.0.3" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Internal-Node IPv4: 10.244.1.72" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" IPv4 allocation prefix: 10.244.1.0/25" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" IPv4 native routing prefix: 10.244.0.0/16" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Loopback IPv4: 169.254.42.1" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Local IPv4 addresses:" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" - 164.90.136.24" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" - 10.17.0.5" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" - 10.108.0.3" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" - 164.90.136.24" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Node updated" clusterName=default nodeName=system-0-65529 subsys=nodemanager
time="2025-04-17T22:04:25Z" level=info msg="Adding local node to cluster" node="{system-0-65529 default [{ExternalIP 164.90.136.24} {InternalIP 10.108.0.3} {CiliumInternalIP 10.244.1.72} {ExternalIP 164.90.136.24}] 10.244.1.0/25 [] <nil> [] 10.244.1.44 <nil> <nil> <nil> 0 local 0 map[beta.kubernetes.io/arch:amd64 beta.kubernetes.io/instance-type:s-2vcpu-4gb beta.kubernetes.io/os:linux doks.digitalocean.com/managed:true doks.digitalocean.com/node-id:b67bdbe9-bd78-4fe6-b6cf-afe9bb225ad0 doks.digitalocean.com/node-pool:system-0 doks.digitalocean.com/node-pool-id:73347348-171d-4091-a3c9-1d4b0945c964 doks.digitalocean.com/version:1.30.10-do.0 failure-domain.beta.kubernetes.io/region:nyc3 kubernetes.io/arch:amd64 kubernetes.io/hostname:system-0-65529 kubernetes.io/os:linux node.kubernetes.io/instance-type:s-2vcpu-4gb region:nyc3 topology.kubernetes.io/region:nyc3] map[] 1 80a520c6-e29e-4fba-b83f-e22eb5be2068}" subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=info msg="Node updated" clusterName=default nodeName=system-0-6tk3b subsys=nodemanager
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=2059c76d-348b-4c2b-a795-55f75c26b77c subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2059c76d-348b-4c2b-a795-55f75c26b77c policyRevision=2 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=c8b2e396-2466-40f7-b27e-e6cfc7fcf578 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c8b2e396-2466-40f7-b27e-e6cfc7fcf578 policyRevision=3 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=c3537988-b97d-4ec4-87ed-ea0a7d07dc11 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c3537988-b97d-4ec4-87ed-ea0a7d07dc11 policyRevision=4 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=5132c0a1-3303-464e-b45a-6eb3e1dde804 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=5132c0a1-3303-464e-b45a-6eb3e1dde804 policyRevision=5 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=536870ae-268e-4478-a339-778448ec116d subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=536870ae-268e-4478-a339-778448ec116d policyRevision=6 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=533deec1-93e5-491b-bb28-cf3d8e70c612 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=533deec1-93e5-491b-bb28-cf3d8e70c612 policyRevision=7 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=1520cbd5-1199-4059-8766-9cdb0fcece84 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1520cbd5-1199-4059-8766-9cdb0fcece84 policyRevision=8 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-65529 subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=info msg="Waiting until all pre-existing resources have been received" subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Node updated" clusterName=default nodeName=system-0-6tk3r subsys=nodemanager
time="2025-04-17T22:04:25Z" level=info msg="Annotating k8s node" subsys=daemon v4CiliumHostIP.IPv4=10.244.1.72 v4IngressIP.IPv4="<nil>" v4Prefix=10.244.1.0/25 v4healthIP.IPv4=10.244.1.44 v6CiliumHostIP.IPv6="<nil>" v6IngressIP.IPv6="<nil>" v6Prefix="<nil>" v6healthIP.IPv6="<nil>"
time="2025-04-17T22:04:25Z" level=info msg="Initializing identity allocator" subsys=identity-cache
time="2025-04-17T22:04:25Z" level=info msg="Allocating identities between range" cluster-id=0 max=65535 min=256 subsys=identity-cache
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.forwarding sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.rp_filter sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.accept_local sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.send_redirects sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.forwarding sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.rp_filter sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.accept_local sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.send_redirects sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.core.bpf_jit_enable sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.all.rp_filter sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.fib_multipath_use_neigh sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=kernel.unprivileged_bpf_disabled sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=kernel.timer_migration sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting up BPF datapath" bpfClockSource=ktime bpfInsnSet="<nil>" subsys=datapath-loader
time="2025-04-17T22:04:26Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-17T22:04:26Z" level=info msg="Iptables rules installed" subsys=iptables
time="2025-04-17T22:04:26Z" level=info msg="Adding new proxy port rules for cilium-dns-egress:40061" id=cilium-dns-egress subsys=proxy
time="2025-04-17T22:04:26Z" level=info msg="Iptables proxy rules installed" subsys=iptables
time="2025-04-17T22:04:26Z" level=info msg="Start hook executed" duration=2.222971128s function="cmd.newDaemonPromise.func1 (daemon_main.go:1694)" subsys=hive
time="2025-04-17T22:04:26Z" level=info msg="Start hook executed" duration="44.35µs" function="utime.initUtimeSync.func1 (cell.go:33)" subsys=hive
time="2025-04-17T22:04:26Z" level=info msg="Start hook executed" duration="7.549µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:04:26Z" level=info msg="Starting IP identity watcher" subsys=ipcache
time="2025-04-17T22:04:26Z" level=info msg="Initializing daemon" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Validating configured node address ranges" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Starting connection tracking garbage collector" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Initial scan of connection tracking completed" subsys=ct-gc
time="2025-04-17T22:04:26Z" level=info msg="Regenerating restored endpoints" numRestored=0 subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Creating host endpoint" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="New endpoint" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1715 ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Resolving identity labels (blocking)" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1715 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:doks.digitalocean.com/node-id=b67bdbe9-bd78-4fe6-b6cf-afe9bb225ad0,k8s:doks.digitalocean.com/node-pool-id=73347348-171d-4091-a3c9-1d4b0945c964,k8s:doks.digitalocean.com/node-pool=system-0,k8s:doks.digitalocean.com/version=1.30.10-do.0,k8s:node.kubernetes.io/instance-type=s-2vcpu-4gb,k8s:region=nyc3,k8s:topology.kubernetes.io/region=nyc3,reserved:host" ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Identity of endpoint changed" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1715 identity=1 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:doks.digitalocean.com/node-id=b67bdbe9-bd78-4fe6-b6cf-afe9bb225ad0,k8s:doks.digitalocean.com/node-pool-id=73347348-171d-4091-a3c9-1d4b0945c964,k8s:doks.digitalocean.com/node-pool=system-0,k8s:doks.digitalocean.com/version=1.30.10-do.0,k8s:node.kubernetes.io/instance-type=s-2vcpu-4gb,k8s:region=nyc3,k8s:topology.kubernetes.io/region=nyc3,reserved:host" ipv4= ipv6= k8sPodName=/ oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Launching Cilium health daemon" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Launching Cilium health endpoint" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Started healthz status API server" address="127.0.0.1:9879" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Processing queued endpoint deletion requests from /var/run/cilium/deleteQueue" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="processing 0 queued deletion requests" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Initializing Cilium API" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Start hook executed" duration="173.706µs" function="l2respondermap.newMap.func1 (l2_responder_map4.go:44)" subsys=hive
time="2025-04-17T22:04:26Z" level=info msg="Start hook executed" duration="6.971µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:04:26Z" level=info msg="Finished regenerating restored endpoints" regenerated=0 subsys=daemon total=0
time="2025-04-17T22:04:26Z" level=info msg="Deleted orphan backends" orphanBackends=0 subsys=service
time="2025-04-17T22:04:26Z" level=info msg="Removed stale bpf map" file-path=/sys/fs/bpf/tc/globals/cilium_lb4_source_range subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Cleaning up Cilium health endpoint" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Daemon initialization completed" bootstrapTime=3.32915262s subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Configuring Hubble server" eventQueueSize=2048 maxFlows=4095 subsys=hubble
time="2025-04-17T22:04:26Z" level=info msg="Starting local Hubble server" address="unix:///var/run/cilium/hubble.sock" subsys=hubble
time="2025-04-17T22:04:26Z" level=info msg="Beginning to read perf buffer" startTime="2025-04-17 22:04:26.488904612 +0000 UTC m=+3.389865571" subsys=monitor-agent
time="2025-04-17T22:04:26Z" level=info msg="Starting Hubble server" address=":4244" subsys=hubble
time="2025-04-17T22:04:26Z" level=info msg="Serving cilium API at unix:///var/run/cilium/cilium.sock" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Create endpoint request" addressing="&{10.244.1.28 51e14210-aae1-4e0b-85b3-8db00ab678ed default }" containerID=e5ccc336169ffbc10803b8ffd55d2c82edf18dfd761be28cb26489b2ff8e2d9e datapathConfiguration="&{false false false false false <nil>}" interface=lxc44b50984441e k8sPodName=kube-system/konnectivity-agent-qrsjc labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:26Z" level=info msg="New endpoint" containerID=e5ccc33616 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=541 ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Resolving identity labels (blocking)" containerID=e5ccc33616 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=541 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=konnectivity-agent" ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Reusing existing global key" key="k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=konnectivity-agent;" subsys=allocator
time="2025-04-17T22:04:26Z" level=info msg="Identity of endpoint changed" containerID=e5ccc33616 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=541 identity=32430 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=konnectivity-agent" ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Waiting for endpoint to be generated" containerID=e5ccc33616 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=541 identity=32430 ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Compiled new BPF template" BPFCompilationTime=448.864037ms file-path=/var/run/cilium/state/templates/38947ca1b98bdc798c90e04502afdc182aef39dd080a3b23edb6bffd04e607ac/bpf_host.o subsys=datapath-loader
time="2025-04-17T22:04:26Z" level=info msg="Rewrote endpoint BPF program" containerID= datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=1715 identity=1 ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:27Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-17T22:04:27Z" level=info msg="New endpoint" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=162 ipv4=10.244.1.44 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:27Z" level=info msg="Resolving identity labels (blocking)" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=162 identityLabels="reserved:health" ipv4=10.244.1.44 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:27Z" level=info msg="Identity of endpoint changed" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=162 identity=4 identityLabels="reserved:health" ipv4=10.244.1.44 ipv6= k8sPodName=/ oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:28Z" level=info msg="Compiled new BPF template" BPFCompilationTime=1.599029024s file-path=/var/run/cilium/state/templates/756338fe133d4c92642f65cd7b1cfff69481ecf0ee39d710b520c903ca69d266/bpf_lxc.o subsys=datapath-loader
time="2025-04-17T22:04:28Z" level=info msg="Rewrote endpoint BPF program" containerID= datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=162 identity=4 ipv4=10.244.1.44 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:28Z" level=info msg="Rewrote endpoint BPF program" containerID=e5ccc33616 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=541 identity=32430 ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc subsys=endpoint
time="2025-04-17T22:04:28Z" level=info msg="Successful endpoint creation" containerID=e5ccc33616 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=541 identity=32430 ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc subsys=daemon
time="2025-04-17T22:04:28Z" level=info msg="Serving cilium health API at unix:///var/run/cilium/health.sock" subsys=health-server
time="2025-04-17T22:04:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.42 237daac6-3b67-41fb-978e-b0d6b66588b3 default }" containerID=0a8850748733ff6c28587ecdf65266d4741a301b49375a62bf9c7435239be287 datapathConfiguration="&{false false false false false <nil>}" interface=lxc42d2980c1128 k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:40Z" level=info msg="New endpoint" containerID=0a88507487 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3729 ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Resolving identity labels (blocking)" containerID=0a88507487 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3729 identityLabels="k8s:app.kubernetes.io/name=argocd-notifications-controller,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-notifications-controller,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-notifications-controller;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-notifications-controller;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:40Z" level=info msg="Identity of endpoint changed" containerID=0a88507487 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3729 identity=23481 identityLabels="k8s:app.kubernetes.io/name=argocd-notifications-controller,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-notifications-controller,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Waiting for endpoint to be generated" containerID=0a88507487 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3729 identity=23481 ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.55 9301b5a0-d149-46a6-af14-78430687825d default }" containerID=95d7216a41490e02f87fb2e992471ce0cf75b78dc85e380fd90171f4bc9bf6d0 datapathConfiguration="&{false false false false false <nil>}" interface=lxc2366a1a5d536 k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:40Z" level=info msg="New endpoint" containerID=95d7216a41 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1525 ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Resolving identity labels (blocking)" containerID=95d7216a41 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1525 identityLabels="k8s:app.kubernetes.io/name=argocd-dex-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-dex-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-dex-server;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-dex-server;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:40Z" level=info msg="Identity of endpoint changed" containerID=95d7216a41 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1525 identity=63450 identityLabels="k8s:app.kubernetes.io/name=argocd-dex-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-dex-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Waiting for endpoint to be generated" containerID=95d7216a41 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1525 identity=63450 ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.82 2669e1f1-246e-42e3-9135-c44d5a4f97b9 default }" containerID=dc5a0589c22c957a7550e064268f256ef73f8cf3f3f7f153257d553d528e10ba datapathConfiguration="&{false false false false false <nil>}" interface=lxcfc40e706c1f1 k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:40Z" level=info msg="New endpoint" containerID=dc5a0589c2 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4061 ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Resolving identity labels (blocking)" containerID=dc5a0589c2 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4061 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/instance=kubeintel;k8s:app.kubernetes.io/name=kubeintel;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=kubeintel;k8s:io.kubernetes.pod.namespace=kubeintel;" subsys=allocator
time="2025-04-17T22:04:40Z" level=info msg="Identity of endpoint changed" containerID=dc5a0589c2 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4061 identity=23569 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Waiting for endpoint to be generated" containerID=dc5a0589c2 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4061 identity=23569 ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Rewrote endpoint BPF program" containerID=0a88507487 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=3729 identity=23481 ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Successful endpoint creation" containerID=0a88507487 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=3729 identity=23481 ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql subsys=daemon
time="2025-04-17T22:04:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.70 a75f583a-6ecc-4514-bd1a-f3cb9811aa59 default }" containerID=171e582722ff78cbb3e2803d9c64f86db914531dd41a8674d807a8730ff2add7 datapathConfiguration="&{false false false false false <nil>}" interface=lxcddc148ee97ea k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:40Z" level=info msg="New endpoint" containerID=171e582722 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2840 ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Resolving identity labels (blocking)" containerID=171e582722 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2840 identityLabels="k8s:app.kubernetes.io/name=argocd-applicationset-controller,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-applicationset-controller,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-applicationset-controller;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-applicationset-controller;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:40Z" level=info msg="Identity of endpoint changed" containerID=171e582722 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2840 identity=57071 identityLabels="k8s:app.kubernetes.io/name=argocd-applicationset-controller,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-applicationset-controller,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Waiting for endpoint to be generated" containerID=171e582722 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2840 identity=57071 ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Rewrote endpoint BPF program" containerID=dc5a0589c2 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=4061 identity=23569 ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Successful endpoint creation" containerID=dc5a0589c2 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=4061 identity=23569 ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 subsys=daemon
time="2025-04-17T22:04:40Z" level=info msg="Rewrote endpoint BPF program" containerID=95d7216a41 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=1525 identity=63450 ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.87 b376e6a5-2bc5-4707-a20f-45f607f52a0d default }" containerID=ecf5a88435a9208312b1bbf92356372420f9d0a9364a68f921674f8d1da6c206 datapathConfiguration="&{false false false false false <nil>}" interface=lxc52472155addf k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:40Z" level=info msg="New endpoint" containerID=ecf5a88435 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4089 ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Successful endpoint creation" containerID=95d7216a41 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=1525 identity=63450 ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 subsys=daemon
time="2025-04-17T22:04:40Z" level=info msg="Resolving identity labels (blocking)" containerID=ecf5a88435 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4089 identityLabels="k8s:app.kubernetes.io/name=argocd-redis,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-redis,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-redis;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-redis;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:40Z" level=info msg="Identity of endpoint changed" containerID=ecf5a88435 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4089 identity=3754 identityLabels="k8s:app.kubernetes.io/name=argocd-redis,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-redis,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Waiting for endpoint to be generated" containerID=ecf5a88435 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4089 identity=3754 ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=171e582722 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=2840 identity=57071 ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=171e582722 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=2840 identity=57071 ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=ecf5a88435 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=4089 identity=3754 ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=ecf5a88435 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=4089 identity=3754 ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.123 643c8582-352f-4555-85d1-3c520372aa40 default }" containerID=f3e05d1c5d95cce83c74785bb93c8dfb6e46d9960f716a195793daffe129bd4c datapathConfiguration="&{false false false false false <nil>}" interface=lxcf06a3fb206e6 k8sPodName=kube-system/coredns-854895db77-p9hbd labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:41Z" level=info msg="New endpoint" containerID=f3e05d1c5d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1599 ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Resolving identity labels (blocking)" containerID=f3e05d1c5d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1599 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Reusing existing global key" key="k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=coredns;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=kube-dns;" subsys=allocator
time="2025-04-17T22:04:41Z" level=info msg="Identity of endpoint changed" containerID=f3e05d1c5d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1599 identity=30020 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Waiting for endpoint to be generated" containerID=f3e05d1c5d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1599 identity=30020 ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.12 75d8c3dd-78e6-47fc-b485-fce5f2057cd2 default }" containerID=cbc1505e67f1c58a570cbf0c3266fca29a6e7b3edc9184a2316e3111e2c76d77 datapathConfiguration="&{false false false false false <nil>}" interface=lxc50c1d4ac5f15 k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:41Z" level=info msg="New endpoint" containerID=cbc1505e67 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3225 ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Resolving identity labels (blocking)" containerID=cbc1505e67 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3225 identityLabels="k8s:app.kubernetes.io/component=controller,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=cert-manager,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=cert-manager,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Reusing existing global key" key="k8s:app=cert-manager;k8s:app.kubernetes.io/component=controller;k8s:app.kubernetes.io/instance=cert-manager;k8s:app.kubernetes.io/name=cert-manager;k8s:app.kubernetes.io/version=v1.15.1;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=cert-manager;k8s:io.kubernetes.pod.namespace=cert-manager;" subsys=allocator
time="2025-04-17T22:04:41Z" level=info msg="Identity of endpoint changed" containerID=cbc1505e67 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3225 identity=509 identityLabels="k8s:app.kubernetes.io/component=controller,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=cert-manager,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=cert-manager,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Waiting for endpoint to be generated" containerID=cbc1505e67 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3225 identity=509 ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=f3e05d1c5d datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=1599 identity=30020 ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=f3e05d1c5d datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=1599 identity=30020 ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=cbc1505e67 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=3225 identity=509 ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=cbc1505e67 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=3225 identity=509 ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.6 2e88355d-526b-46d0-ae5c-463fdcdb24ba default }" containerID=4b4a5b96722a7cf5999e2ee9368e8d93c0649a59c45a50faf1733bd6aa69c19a datapathConfiguration="&{false false false false false <nil>}" interface=lxc424bd7f452fa k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:41Z" level=info msg="New endpoint" containerID=4b4a5b9672 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3208 ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Resolving identity labels (blocking)" containerID=4b4a5b9672 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3208 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Reusing existing global key" key="k8s:app=nginx;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=demo;" subsys=allocator
time="2025-04-17T22:04:41Z" level=info msg="Identity of endpoint changed" containerID=4b4a5b9672 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3208 identity=60447 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Waiting for endpoint to be generated" containerID=4b4a5b9672 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3208 identity=60447 ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=4b4a5b9672 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=3208 identity=60447 ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=4b4a5b9672 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=3208 identity=60447 ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.125 de79be76-1a86-4e99-bda7-0df4ac52e8cc default }" containerID=d4eda2588c9aae6fb4d96f15064135d9b91e353e28c16d4de411eb58e27e64c3 datapathConfiguration="&{false false false false false <nil>}" interface=lxcd94c723e0e8f k8sPodName=argocd/argocd-server-6f9745ff7-kj46x labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:41Z" level=info msg="New endpoint" containerID=d4eda2588c datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=849 ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Resolving identity labels (blocking)" containerID=d4eda2588c datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=849 identityLabels="k8s:app.kubernetes.io/name=argocd-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-server;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-server;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:41Z" level=info msg="Identity of endpoint changed" containerID=d4eda2588c datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=849 identity=1930 identityLabels="k8s:app.kubernetes.io/name=argocd-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Waiting for endpoint to be generated" containerID=d4eda2588c datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=849 identity=1930 ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=d4eda2588c datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=849 identity=1930 ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=d4eda2588c datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=849 identity=1930 ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.112 7ca0b46e-3484-49a2-ad4c-df150f7624ba default }" containerID=bf96af1bf079817ca7f1de6393ec7cede5086ad8ebebb76487d6aeac5f53599a datapathConfiguration="&{false false false false false <nil>}" interface=lxce8d72ff3b26a k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:41Z" level=info msg="New endpoint" containerID=bf96af1bf0 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=561 ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Resolving identity labels (blocking)" containerID=bf96af1bf0 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=561 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/instance=bikexbike;k8s:app.kubernetes.io/name=bikexbike;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=bikexbike;" subsys=allocator
time="2025-04-17T22:04:41Z" level=info msg="Identity of endpoint changed" containerID=bf96af1bf0 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=561 identity=29331 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Waiting for endpoint to be generated" containerID=bf96af1bf0 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=561 identity=29331 ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Rewrote endpoint BPF program" containerID=bf96af1bf0 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=561 identity=29331 ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Successful endpoint creation" containerID=bf96af1bf0 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=561 identity=29331 ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv subsys=daemon
time="2025-04-17T22:04:42Z" level=info msg="Create endpoint request" addressing="&{10.244.1.59 6f9a5746-68c6-4921-84e4-50622b780e18 default }" containerID=20659f8a6d46fde4e0f82429e970ee21a032f6289ddfe65c318c2240110adc11 datapathConfiguration="&{false false false false false <nil>}" interface=lxc548fd48d3b44 k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:42Z" level=info msg="New endpoint" containerID=20659f8a6d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3850 ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Resolving identity labels (blocking)" containerID=20659f8a6d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3850 identityLabels="k8s:app.kubernetes.io/name=hubble-ui,k8s:app.kubernetes.io/part-of=cilium,k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=hubble-ui,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=hubble-ui" ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=hubble-ui;k8s:app.kubernetes.io/part-of=cilium;k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=hubble-ui;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=hubble-ui;" subsys=allocator
time="2025-04-17T22:04:42Z" level=info msg="Identity of endpoint changed" containerID=20659f8a6d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3850 identity=1382 identityLabels="k8s:app.kubernetes.io/name=hubble-ui,k8s:app.kubernetes.io/part-of=cilium,k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=hubble-ui,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=hubble-ui" ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Waiting for endpoint to be generated" containerID=20659f8a6d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3850 identity=1382 ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Rewrote endpoint BPF program" containerID=20659f8a6d datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=3850 identity=1382 ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Successful endpoint creation" containerID=20659f8a6d datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=3850 identity=1382 ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 subsys=daemon
time="2025-04-17T22:04:42Z" level=info msg="Create endpoint request" addressing="&{10.244.1.45 9c1b71fc-815d-47ec-9f92-96cbfea24d27 default }" containerID=47fe591a371cb0c903a8cab3b3db2f4f3e06eae370f84dbcb9ad98a9a50a71aa datapathConfiguration="&{false false false false false <nil>}" interface=lxc204d68d7fe54 k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:42Z" level=info msg="New endpoint" containerID=47fe591a37 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=276 ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Resolving identity labels (blocking)" containerID=47fe591a37 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=276 identityLabels="k8s:app.kubernetes.io/component=cainjector,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=cainjector,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=cainjector,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-cainjector,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Reusing existing global key" key="k8s:app=cainjector;k8s:app.kubernetes.io/component=cainjector;k8s:app.kubernetes.io/instance=cert-manager;k8s:app.kubernetes.io/name=cainjector;k8s:app.kubernetes.io/version=v1.15.1;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-cainjector;k8s:io.kubernetes.pod.namespace=cert-manager;" subsys=allocator
time="2025-04-17T22:04:42Z" level=info msg="Identity of endpoint changed" containerID=47fe591a37 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=276 identity=2984 identityLabels="k8s:app.kubernetes.io/component=cainjector,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=cainjector,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=cainjector,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-cainjector,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Waiting for endpoint to be generated" containerID=47fe591a37 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=276 identity=2984 ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Rewrote endpoint BPF program" containerID=47fe591a37 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=276 identity=2984 ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Successful endpoint creation" containerID=47fe591a37 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=276 identity=2984 ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk subsys=daemon
time="2025-04-17T22:04:43Z" level=info msg="Create endpoint request" addressing="&{10.244.1.120 bded704d-05c4-4050-a5ee-0eed5c6120c4 default }" containerID=e46fee48dd0c6a1ea84f75f089b5fe611b89fa29aca227edd6413a23c006512e datapathConfiguration="&{false false false false false <nil>}" interface=lxcee0fb6be79a7 k8sPodName=argocd/argocd-application-controller-0 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:43Z" level=info msg="New endpoint" containerID=e46fee48dd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=497 ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Resolving identity labels (blocking)" containerID=e46fee48dd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=497 identityLabels="k8s:app.kubernetes.io/name=argocd-application-controller,k8s:apps.kubernetes.io/pod-index=0,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-application-controller,k8s:io.kubernetes.pod.namespace=argocd,k8s:statefulset.kubernetes.io/pod-name=argocd-application-controller-0" ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-application-controller;k8s:apps.kubernetes.io/pod-index=0;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-application-controller;k8s:io.kubernetes.pod.namespace=argocd;k8s:statefulset.kubernetes.io/pod-name=argocd-application-controller-0;" subsys=allocator
time="2025-04-17T22:04:43Z" level=info msg="Identity of endpoint changed" containerID=e46fee48dd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=497 identity=15523 identityLabels="k8s:app.kubernetes.io/name=argocd-application-controller,k8s:apps.kubernetes.io/pod-index=0,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-application-controller,k8s:io.kubernetes.pod.namespace=argocd,k8s:statefulset.kubernetes.io/pod-name=argocd-application-controller-0" ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Waiting for endpoint to be generated" containerID=e46fee48dd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=497 identity=15523 ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Rewrote endpoint BPF program" containerID=e46fee48dd datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=497 identity=15523 ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Successful endpoint creation" containerID=e46fee48dd datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=497 identity=15523 ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 subsys=daemon
time="2025-04-17T22:04:43Z" level=info msg="Create endpoint request" addressing="&{10.244.1.26 be15d53a-60ce-46bf-92c2-6bc683f760bd default }" containerID=575c2bfcdf037c4c311013c985613f9bb2ce3407df56e4707ad44a86637b6364 datapathConfiguration="&{false false false false false <nil>}" interface=lxc7925e256ff1d k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:43Z" level=info msg="New endpoint" containerID=575c2bfcdf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=70 ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Resolving identity labels (blocking)" containerID=575c2bfcdf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=70 identityLabels="k8s:app.kubernetes.io/name=argocd-repo-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-repo-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-repo-server;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-repo-server;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:43Z" level=info msg="Identity of endpoint changed" containerID=575c2bfcdf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=70 identity=10654 identityLabels="k8s:app.kubernetes.io/name=argocd-repo-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-repo-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Waiting for endpoint to be generated" containerID=575c2bfcdf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=70 identity=10654 ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Rewrote endpoint BPF program" containerID=575c2bfcdf datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=70 identity=10654 ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Successful endpoint creation" containerID=575c2bfcdf datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=70 identity=10654 ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc subsys=daemon
time="2025-04-17T22:04:43Z" level=info msg="Create endpoint request" addressing="&{10.244.1.43 612ba327-6579-40cf-97fc-62783dee8bf9 default }" containerID=af7c770ec426f73a5ffacb2513ead6ed472d9279323bf4cde554c7e6beb8af8c datapathConfiguration="&{false false false false false <nil>}" interface=lxc7a60b9024355 k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:43Z" level=info msg="New endpoint" containerID=af7c770ec4 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=247 ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Resolving identity labels (blocking)" containerID=af7c770ec4 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=247 identityLabels="k8s:app.kubernetes.io/name=hubble-relay,k8s:app.kubernetes.io/part-of=cilium,k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=hubble-relay,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=hubble-relay" ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=hubble-relay;k8s:app.kubernetes.io/part-of=cilium;k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=hubble-relay;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=hubble-relay;" subsys=allocator
time="2025-04-17T22:04:43Z" level=info msg="Identity of endpoint changed" containerID=af7c770ec4 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=247 identity=14492 identityLabels="k8s:app.kubernetes.io/name=hubble-relay,k8s:app.kubernetes.io/part-of=cilium,k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=hubble-relay,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=hubble-relay" ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Waiting for endpoint to be generated" containerID=af7c770ec4 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=247 identity=14492 ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Rewrote endpoint BPF program" containerID=af7c770ec4 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=247 identity=14492 ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Successful endpoint creation" containerID=af7c770ec4 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=247 identity=14492 ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl subsys=daemon
time="2025-04-17T22:04:44Z" level=info msg="Create endpoint request" addressing="&{10.244.1.73 9aecd771-469c-4014-8093-84a9b9cc6e0b default }" containerID=fba121b519e4fb399e8822d635ebb01052ca3160a39c20ecb9b8c5e1fbc71e4e datapathConfiguration="&{false false false false false <nil>}" interface=lxcc0bf1d2fa535 k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:44Z" level=info msg="New endpoint" containerID=fba121b519 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1948 ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Resolving identity labels (blocking)" containerID=fba121b519 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1948 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Identity of endpoint changed" containerID=fba121b519 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1948 identity=29331 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Waiting for endpoint to be generated" containerID=fba121b519 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1948 identity=29331 ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Rewrote endpoint BPF program" containerID=fba121b519 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=1948 identity=29331 ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Successful endpoint creation" containerID=fba121b519 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=1948 identity=29331 ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=daemon
time="2025-04-17T22:04:44Z" level=info msg="Create endpoint request" addressing="&{10.244.1.27 33e3f78f-9e7e-4e54-895f-1817802961b3 default }" containerID=468cd28bdd721e4712b2cd99a4d2a6f2620eb2814ef959f54807defc45703877 datapathConfiguration="&{false false false false false <nil>}" interface=lxcc810e996b982 k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:44Z" level=info msg="New endpoint" containerID=468cd28bdd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2983 ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Resolving identity labels (blocking)" containerID=468cd28bdd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2983 identityLabels="k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=metrics-server,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=metrics-server" ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Reusing existing global key" key="k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=metrics-server;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=metrics-server;" subsys=allocator
time="2025-04-17T22:04:44Z" level=info msg="Identity of endpoint changed" containerID=468cd28bdd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2983 identity=49023 identityLabels="k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=metrics-server,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=metrics-server" ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Waiting for endpoint to be generated" containerID=468cd28bdd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2983 identity=49023 ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Rewrote endpoint BPF program" containerID=468cd28bdd datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=2983 identity=49023 ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Successful endpoint creation" containerID=468cd28bdd datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=2983 identity=49023 ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr subsys=daemon
time="2025-04-17T22:04:44Z" level=info msg="Create endpoint request" addressing="&{10.244.1.46 fc10a50f-7f70-40d7-b3f1-1015ccd19000 default }" containerID=5a2994da5aec4e2b9a49fac6c3f25e2fe46407d2f89711dce6341f791db2ac42 datapathConfiguration="&{false false false false false <nil>}" interface=lxc89bdec34ed14 k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:44Z" level=info msg="New endpoint" containerID=5a2994da5a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=104 ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Resolving identity labels (blocking)" containerID=5a2994da5a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=104 identityLabels="k8s:app.kubernetes.io/component=webhook,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=webhook,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=webhook,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-webhook,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Reusing existing global key" key="k8s:app=webhook;k8s:app.kubernetes.io/component=webhook;k8s:app.kubernetes.io/instance=cert-manager;k8s:app.kubernetes.io/name=webhook;k8s:app.kubernetes.io/version=v1.15.1;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-webhook;k8s:io.kubernetes.pod.namespace=cert-manager;" subsys=allocator
time="2025-04-17T22:04:44Z" level=info msg="Identity of endpoint changed" containerID=5a2994da5a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=104 identity=64008 identityLabels="k8s:app.kubernetes.io/component=webhook,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=webhook,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=webhook,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-webhook,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Waiting for endpoint to be generated" containerID=5a2994da5a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=104 identity=64008 ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Rewrote endpoint BPF program" containerID=5a2994da5a datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=104 identity=64008 ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Successful endpoint creation" containerID=5a2994da5a datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=104 identity=64008 ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl subsys=daemon
time="2025-04-17T22:04:45Z" level=info msg="Create endpoint request" addressing="&{10.244.1.56 48e794a8-1e5d-4442-b945-55ba359483a6 default }" containerID=91ef5fafe63d82f38dd626c75d7124fa985e4255cd589661c53c8e6852f03648 datapathConfiguration="&{false false false false false <nil>}" interface=lxc1100eb0f0490 k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:45Z" level=info msg="New endpoint" containerID=91ef5fafe6 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2948 ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Resolving identity labels (blocking)" containerID=91ef5fafe6 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2948 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Identity of endpoint changed" containerID=91ef5fafe6 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2948 identity=23569 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Waiting for endpoint to be generated" containerID=91ef5fafe6 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2948 identity=23569 ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Rewrote endpoint BPF program" containerID=91ef5fafe6 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=2948 identity=23569 ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Successful endpoint creation" containerID=91ef5fafe6 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=2948 identity=23569 ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=daemon
time="2025-04-17T22:05:15Z" level=info msg="Node updated" clusterName=default nodeName=system-0-655pn subsys=nodemanager
time="2025-04-17T22:05:15Z" level=info msg="Node updated" clusterName=default nodeName=system-0-655pn subsys=nodemanager
time="2025-04-17T22:05:16Z" level=info msg="Node updated" clusterName=default nodeName=system-0-655pn subsys=nodemanager
time="2025-04-17T22:06:16Z" level=info msg="Node deleted" clusterName=default nodeName=system-0-6tk3b subsys=nodemanager
time="2025-04-17T22:06:56Z" level=info msg="Node deleted" clusterName=default nodeName=system-0-6tk3r subsys=nodemanager
time="2025-04-17T22:09:26Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.00528717041015625 newInterval=7m30s subsys=map-ct
time="2025-04-17T22:16:56Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.01020050048828125 newInterval=11m15s subsys=map-ct
time="2025-04-17T22:28:11Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.01514434814453125 newInterval=16m53s subsys=map-ct
time="2025-04-17T22:45:04Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.02292633056640625 newInterval=25m20s subsys=map-ct
time="2025-04-17T23:10:24Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.0340576171875 newInterval=38m0s subsys=map-ct
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=6e6a1ffb-258f-4b35-b15a-596f02831268 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6e6a1ffb-258f-4b35-b15a-596f02831268 policyRevision=10 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=e04f7347-843d-445c-8900-7f47a79ce241 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=e04f7347-843d-445c-8900-7f47a79ce241 policyRevision=12 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=52caf630-c483-4769-846b-21ad91515baa subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=52caf630-c483-4769-846b-21ad91515baa policyRevision=14 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=3ffb3674-0687-493d-8026-84e6bcb16bb9 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=3ffb3674-0687-493d-8026-84e6bcb16bb9 policyRevision=16 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=2eb75a51-e804-4d38-a1e4-ee6c0a6cb604 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2eb75a51-e804-4d38-a1e4-ee6c0a6cb604 policyRevision=18 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=304e2e2d-20e2-41c5-8a93-2ba8656df5f6 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=304e2e2d-20e2-41c5-8a93-2ba8656df5f6 policyRevision=20 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=95c15523-625d-4201-9836-ed68eb9951ac subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=95c15523-625d-4201-9836-ed68eb9951ac policyRevision=22 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=0fc9d590-7cd1-4e65-9902-367afa656198 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=0fc9d590-7cd1-4e65-9902-367afa656198 policyRevision=24 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=5464360e-ad6a-44c9-be09-9b2991b4d9c2 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=5464360e-ad6a-44c9-be09-9b2991b4d9c2 policyRevision=26 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=667e5eb3-be94-47d6-b903-884428c4ebe8 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=667e5eb3-be94-47d6-b903-884428c4ebe8 policyRevision=28 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=d8c0ae6b-c490-41d2-8d63-7b19da985eb0 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d8c0ae6b-c490-41d2-8d63-7b19da985eb0 policyRevision=30 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=302e1ed5-4fdc-49b3-a59f-ca51d19b0f54 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=302e1ed5-4fdc-49b3-a59f-ca51d19b0f54 policyRevision=32 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=4f9abf9f-75d2-4e20-bb33-e65a7060608c subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=4f9abf9f-75d2-4e20-bb33-e65a7060608c policyRevision=34 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=9686038c-8750-489a-9c43-bbefcf218bb9 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=9686038c-8750-489a-9c43-bbefcf218bb9 policyRevision=36 subsys=daemon
time="2025-04-26T18:05:51Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-26T18:05:51Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-26T18:06:52Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-26T18:06:52Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-26T18:27:31Z" level=warning msg="service not found" k8sNamespace=ingress-nginx k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-26T18:27:31Z" level=warning msg="service not found" k8sNamespace=ingress-nginx k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-27T20:18:38Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-27T20:18:59Z" level=warning msg="service not found" k8sNamespace=kubeintel k8sSvcName=cm-acme-http-solver-cmhbq obj="10.245.224.188:8089/ANY" subsys=k8s-watcher
time="2025-04-27T20:51:16Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=241c0125-ecf7-45bf-9bae-fb1050afe25c subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=241c0125-ecf7-45bf-9bae-fb1050afe25c policyRevision=38 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=1e0ee758-9abc-498c-a11d-7c6c6f27b04c subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1e0ee758-9abc-498c-a11d-7c6c6f27b04c policyRevision=40 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=78f9e5b7-c0e9-494f-b201-a50cf77e8ba3 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=78f9e5b7-c0e9-494f-b201-a50cf77e8ba3 policyRevision=42 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=d0521085-9936-4b1b-b240-9857b87d6842 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d0521085-9936-4b1b-b240-9857b87d6842 policyRevision=44 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=58cd1899-b3ac-449c-a055-9752928c0114 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=58cd1899-b3ac-449c-a055-9752928c0114 policyRevision=46 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=968a96f0-7193-45c8-9cc9-1b6c47cd1a9e subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=968a96f0-7193-45c8-9cc9-1b6c47cd1a9e policyRevision=48 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=f0d2eca0-d987-4069-acce-fc1b7b29b39a subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=f0d2eca0-d987-4069-acce-fc1b7b29b39a policyRevision=50 subsys=daemon
time="2025-05-05T00:12:31Z" level=info msg="Delete endpoint request" containerID=91ef5fafe6 endpointID=2948 k8sNamespace=kubeintel k8sPodName=kubeintel-55955b8c46-bbjbh subsys=daemon
time="2025-05-05T00:12:31Z" level=info msg="Releasing key" key="[k8s:app.kubernetes.io/instance=kubeintel k8s:app.kubernetes.io/name=kubeintel k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=kubeintel k8s:io.kubernetes.pod.namespace=kubeintel]" subsys=allocator
time="2025-05-05T00:12:31Z" level=info msg="Removed endpoint" containerID=91ef5fafe6 datapathPolicyRevision=50 desiredPolicyRevision=36 endpointID=2948 identity=23569 ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=endpoint
time="2025-05-05T00:19:08Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.04230499267578125 newInterval=57m0s subsys=map-ct
time="2025-05-06T03:54:10Z" level=info msg="Delete endpoint request" containerID=fba121b519 endpointID=1948 k8sNamespace=bikexbike k8sPodName=bikexbike-588cddd898-9dz72 subsys=daemon
time="2025-05-06T03:54:10Z" level=info msg="Releasing key" key="[k8s:app.kubernetes.io/instance=bikexbike k8s:app.kubernetes.io/name=bikexbike k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=bikexbike]" subsys=allocator
time="2025-05-06T03:54:10Z" level=info msg="Removed endpoint" containerID=fba121b519 datapathPolicyRevision=50 desiredPolicyRevision=36 endpointID=1948 identity=29331 ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Delete endpoint request" containerID=4b4a5b9672 endpointID=3208 k8sNamespace=demo k8sPodName=nginx-deployment-86dcfdf4c6-zg6vm subsys=daemon
time="2025-05-08T21:44:42Z" level=info msg="Releasing key" key="[k8s:app=nginx k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=demo]" subsys=allocator
time="2025-05-08T21:44:42Z" level=info msg="Removed endpoint" containerID=4b4a5b9672 datapathPolicyRevision=50 desiredPolicyRevision=36 endpointID=3208 identity=60447 ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Create endpoint request" addressing="&{10.244.1.15 18b4e1eb-4ff7-42ca-8932-a5db78dd3841 default }" containerID=600ed6ccf165cdd8257b93e509ee54c2045c4e16c82f2540a56530fe199e8e1a datapathConfiguration="&{false false false false false <nil>}" interface=lxc59ef080483d6 k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q labels="[]" subsys=daemon sync-build=true
time="2025-05-08T21:44:42Z" level=info msg="New endpoint" containerID=600ed6ccf1 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3879 ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Resolving identity labels (blocking)" containerID=600ed6ccf1 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3879 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Reusing existing global key" key="k8s:app=nginx;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=demo;" subsys=allocator
time="2025-05-08T21:44:42Z" level=info msg="Identity of endpoint changed" containerID=600ed6ccf1 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3879 identity=60447 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q oldIdentity="no identity" subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Waiting for endpoint to be generated" containerID=600ed6ccf1 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3879 identity=60447 ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Rewrote endpoint BPF program" containerID=600ed6ccf1 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=3879 identity=60447 ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Successful endpoint creation" containerID=600ed6ccf1 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=3879 identity=60447 ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=daemon
time="2025-05-08T22:04:57Z" level=info msg="Delete endpoint request" containerID=600ed6ccf1 endpointID=3879 k8sNamespace=demo k8sPodName=nginx-deployment-86dcfdf4c6-pxz7q subsys=daemon
time="2025-05-08T22:04:57Z" level=info msg="Releasing key" key="[k8s:app=nginx k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=demo]" subsys=allocator
time="2025-05-08T22:04:57Z" level=info msg="Removed endpoint" containerID=600ed6ccf1 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=3879 identity=60447 ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-08T22:41:19Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=bb90ae07-0508-4036-baa8-5ffceb236efd subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=bb90ae07-0508-4036-baa8-5ffceb236efd policyRevision=52 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=1d294035-1980-4a5c-a452-0f9fb441c513 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1d294035-1980-4a5c-a452-0f9fb441c513 policyRevision=54 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=0145b270-6958-4089-a2be-589e5d1ce521 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=0145b270-6958-4089-a2be-589e5d1ce521 policyRevision=56 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=dfa764e4-17ec-4d42-89be-b82cf0718a6f subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=dfa764e4-17ec-4d42-89be-b82cf0718a6f policyRevision=58 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=8c26b573-d94a-4e43-ba20-29da1a02f5c4 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=8c26b573-d94a-4e43-ba20-29da1a02f5c4 policyRevision=60 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=dfa5e6b7-656f-430c-9650-c4bd08bf50d8 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=dfa5e6b7-656f-430c-9650-c4bd08bf50d8 policyRevision=62 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=7aa4dcf6-54cb-4818-8ace-20360a51b415 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=7aa4dcf6-54cb-4818-8ace-20360a51b415 policyRevision=64 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=e02dceca-c519-4af3-9e57-c1b6b85c23ca subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=e02dceca-c519-4af3-9e57-c1b6b85c23ca policyRevision=66 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=5763e644-7ed2-4216-a7a4-25020d63fb56 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=5763e644-7ed2-4216-a7a4-25020d63fb56 policyRevision=68 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=87deed5d-6352-46ee-860e-b5cf2630bbe2 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=87deed5d-6352-46ee-860e-b5cf2630bbe2 policyRevision=70 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=59969fbf-e65d-4a24-9de7-e88b92bf4f0f subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=59969fbf-e65d-4a24-9de7-e88b92bf4f0f policyRevision=72 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=19f848ef-640c-4902-97f9-feaf57e7fc12 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=19f848ef-640c-4902-97f9-feaf57e7fc12 policyRevision=74 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=d25069e3-e555-41e3-a0e2-2ca9cba5895a subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d25069e3-e555-41e3-a0e2-2ca9cba5895a policyRevision=76 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=388454d2-58f2-4e52-9a3c-af8c54f5efad subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=388454d2-58f2-4e52-9a3c-af8c54f5efad policyRevision=78 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-20T22:52:49Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T22:52:50Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T22:53:10Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=cm-acme-http-solver-h5gc9 obj="10.245.119.204:8089/ANY" subsys=k8s-watcher
time="2025-05-20T22:53:10Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=cm-acme-http-solver-d8z46 obj="10.245.21.144:8089/ANY" subsys=k8s-watcher
time="2025-05-20T23:26:21Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T23:26:22Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=d39d62b8-f716-4cc8-ac1f-a23217085392 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d39d62b8-f716-4cc8-ac1f-a23217085392 policyRevision=80 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=861a1e45-dd14-4122-bf1f-a6cfa97664e4 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=861a1e45-dd14-4122-bf1f-a6cfa97664e4 policyRevision=82 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=b1678a77-8b1e-4f31-8173-2ce2c9f953bb subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=b1678a77-8b1e-4f31-8173-2ce2c9f953bb policyRevision=84 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=dde50b09-1a02-4069-887e-ad25f66ef42f subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=dde50b09-1a02-4069-887e-ad25f66ef42f policyRevision=86 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=2e1293f3-727b-43d1-8a57-1f590a798b9f subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2e1293f3-727b-43d1-8a57-1f590a798b9f policyRevision=88 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=4c9c715a-2c91-4e21-a839-861ec97de0e9 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=4c9c715a-2c91-4e21-a839-861ec97de0e9 policyRevision=90 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=3496cede-bbe2-4cf2-8c14-e8d16efc2862 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=3496cede-bbe2-4cf2-8c14-e8d16efc2862 policyRevision=92 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-22T22:00:42Z" level=info msg="Delete endpoint request" containerID=f3e05d1c5d endpointID=1599 k8sNamespace=kube-system k8sPodName=coredns-854895db77-p9hbd subsys=daemon
time="2025-05-22T22:00:42Z" level=info msg="Releasing key" key="[k8s:doks.digitalocean.com/managed=true k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=coredns k8s:io.kubernetes.pod.namespace=kube-system k8s:k8s-app=kube-dns]" subsys=allocator
time="2025-05-22T22:00:42Z" level=info msg="Removed endpoint" containerID=f3e05d1c5d datapathPolicyRevision=92 desiredPolicyRevision=78 endpointID=1599 identity=30020 ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Create endpoint request" addressing="&{10.244.1.61 b2d8cc9e-4ea8-4894-bcbe-d3ab02c29ef6 default }" containerID=dc5e48ca7d2fb015d3448f72279677556ecfb046b18362a0bf13895b9fbbfacf datapathConfiguration="&{false false false false false <nil>}" interface=lxca532b63104ac k8sPodName=kube-system/coredns-6b79676d8-9569v labels="[]" subsys=daemon sync-build=true
time="2025-05-22T22:00:42Z" level=info msg="New endpoint" containerID=dc5e48ca7d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=356 ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Resolving identity labels (blocking)" containerID=dc5e48ca7d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=356 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Reusing existing global key" key="k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=coredns;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=kube-dns;" subsys=allocator
time="2025-05-22T22:00:42Z" level=info msg="Identity of endpoint changed" containerID=dc5e48ca7d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=356 identity=30020 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v oldIdentity="no identity" subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Waiting for endpoint to be generated" containerID=dc5e48ca7d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=356 identity=30020 ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Rewrote endpoint BPF program" containerID=dc5e48ca7d datapathPolicyRevision=0 desiredPolicyRevision=92 endpointID=356 identity=30020 ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Successful endpoint creation" containerID=dc5e48ca7d datapathPolicyRevision=92 desiredPolicyRevision=92 endpointID=356 identity=30020 ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=6320599f-8831-436f-a0af-aebca63ea3b0 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6320599f-8831-436f-a0af-aebca63ea3b0 policyRevision=94 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=c1227c40-d350-45b5-8c77-fb356c789259 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c1227c40-d350-45b5-8c77-fb356c789259 policyRevision=96 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=dca1dc14-f9b6-4bc1-bc4c-34129c93a40f subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=dca1dc14-f9b6-4bc1-bc4c-34129c93a40f policyRevision=98 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=0daf0a66-c6ed-4b34-88f2-7c164071686f subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=0daf0a66-c6ed-4b34-88f2-7c164071686f policyRevision=100 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=7e246bc7-a405-4c10-96d9-67ae25b8ffe9 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=7e246bc7-a405-4c10-96d9-67ae25b8ffe9 policyRevision=102 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=7d18237b-2412-49b3-885b-3fb9b36ba7ac subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=7d18237b-2412-49b3-885b-3fb9b36ba7ac policyRevision=104 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=36d121f0-259e-4fc0-9f97-639eeb619115 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=36d121f0-259e-4fc0-9f97-639eeb619115 policyRevision=106 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=d02a2590-a58a-4023-891d-f2035aaee1e6 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d02a2590-a58a-4023-891d-f2035aaee1e6 policyRevision=108 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=9de6c8b4-16e9-447e-a43e-5a8afe50bef9 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=9de6c8b4-16e9-447e-a43e-5a8afe50bef9 policyRevision=110 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=2a4d21ea-6c34-4f80-b83a-83e1a11f8ea9 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2a4d21ea-6c34-4f80-b83a-83e1a11f8ea9 policyRevision=112 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=c24f1493-1c87-4ec7-bd4f-36d5f6046c4e subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c24f1493-1c87-4ec7-bd4f-36d5f6046c4e policyRevision=114 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=0c046dc9-cdeb-4072-bd15-711c728c5413 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=0c046dc9-cdeb-4072-bd15-711c728c5413 policyRevision=116 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=ce2a9abe-b3e0-4fc8-9534-e0c627cc553f subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=ce2a9abe-b3e0-4fc8-9534-e0c627cc553f policyRevision=118 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=e35922a8-7564-43c0-bc00-1977e288b163 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=e35922a8-7564-43c0-bc00-1977e288b163 policyRevision=120 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
cilium-agent
time="2025-04-17T22:04:23Z" level=info msg="Memory available for map entries (0.003% of 4105375744B): 10263439B" subsys=config
time="2025-04-17T22:04:23Z" level=info msg="option bpf-ct-global-tcp-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:04:23Z" level=info msg="option bpf-ct-global-any-max set by dynamic sizing to 65536" subsys=config
time="2025-04-17T22:04:23Z" level=info msg="option bpf-nat-global-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:04:23Z" level=info msg="option bpf-neigh-global-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:04:23Z" level=info msg="option bpf-sock-rev-map-max set by dynamic sizing to 65536" subsys=config
time="2025-04-17T22:04:23Z" level=info msg=" --agent-health-port='9879'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --agent-labels=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --agent-liveness-update-interval='1s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --agent-not-ready-taint-key='node.cilium.io/agent-not-ready'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --allocator-list-timeout='3m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --allow-icmp-frag-needed='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --allow-localhost='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --annotate-k8s-node='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --api-rate-limit=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --arping-refresh-period='30s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --auto-create-cilium-node-resource='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --auto-direct-node-routes='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bgp-announce-lb-ip='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bgp-announce-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bgp-config-path='/var/lib/cilium/bgp/config.yaml'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-auth-map-max='524288'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-global-any-max='262144'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-global-tcp-max='524288'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-regular-any='1m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-regular-tcp='6h0m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-regular-tcp-fin='10s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-regular-tcp-syn='1m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-service-any='1m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-service-tcp='6h0m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-service-tcp-grace='1m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-filter-priority='1'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-fragments-map-max='8192'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-acceleration='disabled'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-affinity-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-algorithm='random'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-dev-ip-addr-inherit=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-dsr-dispatch='opt'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-dsr-l4-xlate='frontend'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-external-clusterip='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-maglev-hash-seed='JLfvgnHc2kaSUFaI'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-maglev-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-maglev-table-size='16381'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-map-max='65536'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-mode='snat'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-proto-diff='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-rev-nat-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-rss-ipv4-src-cidr=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-rss-ipv6-src-cidr=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-service-backend-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-service-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-sock='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-sock-hostns-only='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-source-range-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-map-dynamic-size-ratio='0.0025'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-map-event-buffers=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-nat-global-max='524288'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-neigh-global-max='524288'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-policy-map-full-reconciliation-interval='15m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-policy-map-max='16384'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-root='/sys/fs/bpf'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-sock-rev-map-max='262144'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bypass-ip-availability-upon-restore='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --certificates-directory='/var/run/cilium/certs'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cflags=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cgroup-root='/run/cilium/cgroupv2'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cilium-endpoint-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cluster-health-port='4240'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cluster-id='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cluster-name='default'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cluster-pool-ipv4-mask-size='25'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --clustermesh-config='/var/lib/cilium/clustermesh/'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --clustermesh-ip-identities-sync-timeout='1m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cmdref=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cni-chaining-mode='portmap'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cni-chaining-target=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cni-exclusive='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cni-external-routing='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cni-log-file='/var/run/cilium/cilium-cni.log'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --config=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --config-dir='/tmp/cilium/config-map'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --config-sources='config-map:kube-system/cilium-config'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --conntrack-gc-interval='0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --conntrack-gc-max-interval='0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --container-ip-local-reserved-ports='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --crd-wait-timeout='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --custom-cni-conf='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --datapath-mode='veth'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --debug='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --debug-verbose=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --derive-masquerade-ip-addr-from-device=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --devices=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --direct-routing-device=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --disable-cnp-status-updates='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --disable-endpoint-crd='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --disable-envoy-version-check='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --disable-iptables-feeder-rules=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dns-max-ips-per-restored-rule='1000'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dns-policy-unload-on-shutdown='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-concurrency-limit='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-concurrency-processing-grace-period='0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-enable-transparent-mode='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-insecure-skip-transparent-mode-check='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-lock-count='128'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-lock-timeout='500ms'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-socket-linger-timeout='10'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --egress-gateway-policy-map-max='16384'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --egress-gateway-reconciliation-trigger-interval='1s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --egress-masquerade-interfaces=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --egress-multi-home-ip-rule-compat='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-auto-protect-node-port-range='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bandwidth-manager='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bbr='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bgp-control-plane='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bpf-clock-probe='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bpf-masquerade='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bpf-tproxy='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-cilium-api-server-access='*'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-cilium-endpoint-slice='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-cilium-health-api-server-access='*'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-custom-calls='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-endpoint-health-checking='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-endpoint-routes='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-envoy-config='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-external-ips='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-health-check-nodeport='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-health-checking='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-high-scale-ipcache='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-host-firewall='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-host-legacy-routing='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-host-port='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-hubble='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-hubble-recorder-api='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-icmp-rules='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-identity-mark='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ip-masq-agent='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipsec='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipsec-key-watcher='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipsec-xfrm-state-caching='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv4='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv4-big-tcp='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv4-egress-gateway='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv4-fragment-tracking='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv4-masquerade='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv6='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv6-big-tcp='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv6-masquerade='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv6-ndp='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s-api-discovery='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s-endpoint-slice='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s-event-handover='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s-networkpolicy='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s-terminating-endpoint='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-l2-announcements='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-l2-neigh-discovery='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-l2-pod-announcements='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-l7-proxy='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-local-node-route='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-local-redirect-policy='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-mke='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-monitor='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-nat46x64-gateway='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-node-port='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-pmtu-discovery='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-policy='default'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-recorder='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-remote-node-identity='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-runtime-device-detection='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-sctp='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-service-topology='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-session-affinity='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-srv6='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-stale-cilium-endpoint-cleanup='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-svc-source-range-check='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-tracing='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-unreachable-routes='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-vtep='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-well-known-identities='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-wireguard='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-wireguard-userspace-fallback='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-xdp-prefilter='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-xt-socket-fallback='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --encrypt-interface=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --encrypt-node='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --endpoint-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --endpoint-queue-size='25'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --endpoint-status=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --envoy-config-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --envoy-log=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --exclude-local-address=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --external-envoy-proxy='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --fixed-identity-mapping=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --fqdn-regex-compile-lru-size='1024'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --gops-port='9890'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-403-msg=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-idle-timeout='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-max-grpc-timeout='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-normalize-path='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-request-timeout='3600'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-retry-count='3'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-retry-timeout='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-disable-tls='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-event-buffer-capacity='4095'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-event-queue-size='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-export-file-compress='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-export-file-max-backups='5'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-export-file-max-size-mb='10'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-export-file-path=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-listen-address=':4244'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-metrics=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-metrics-server=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-monitor-events=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-prefer-ipv6='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-recorder-sink-queue-size='1024'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-recorder-storage-path='/var/run/cilium/pcaps'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-skip-unknown-cgroup-ids='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-socket-path='/var/run/cilium/hubble.sock'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-tls-cert-file='/var/lib/cilium/tls/hubble/server.crt'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-tls-client-ca-files='/var/lib/cilium/tls/hubble/client-ca.crt'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-tls-key-file='/var/lib/cilium/tls/hubble/server.key'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --identity-allocation-mode='crd'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --identity-change-grace-period='5s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --identity-gc-interval='5m'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --identity-heartbeat-timeout='15m'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --identity-restore-grace-period='10m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --install-egress-gateway-routes='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --install-iptables-rules='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --install-no-conntrack-iptables-rules='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ip-allocation-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ip-masq-agent-config-path='/etc/config/ip-masq-agent'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipam='cluster-pool'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipam-cilium-node-update-rate='15s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipam-multi-pool-pre-allocation='default=8'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipsec-key-file=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipsec-key-rotation-duration='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --iptables-lock-timeout='5s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --iptables-random-fully='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-native-routing-cidr='10.244.0.0/16'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-node='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-pod-subnets=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-range='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-service-loopback-address='169.254.42.1'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-service-range='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-cluster-alloc-cidr='f00d::/64'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-mcast-device=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-native-routing-cidr=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-node='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-pod-subnets=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-range='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-service-range='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --join-cluster='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-api-server='https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-client-burst='10'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-client-qps='5'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-heartbeat-timeout='30s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-kubeconfig-path=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-namespace='kube-system'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-require-ipv4-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-require-ipv6-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-service-cache-size='128'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-service-proxy-name=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-sync-timeout='3m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-watcher-endpoint-selector='metadata.name!=kube-scheduler,metadata.name!=kube-controller-manager,metadata.name!=etcd-operator,metadata.name!=gcp-controller-manager'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --keep-config='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kube-proxy-replacement='partial'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kube-proxy-replacement-healthz-bind-address=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore-connectivity-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore-lease-ttl='15m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore-max-consecutive-quorum-errors='2'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore-opt=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore-periodic-sync='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --l2-announcements-lease-duration='15s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --l2-announcements-renew-deadline='5s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --l2-announcements-retry-period='2s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --l2-pod-announcements-interface=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --label-prefix-file=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --labels=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --lib-dir='/var/lib/cilium'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --local-max-addr-scope='252'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --local-router-ipv4=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --local-router-ipv6=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --log-driver=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --log-opt=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --log-system-load='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --max-controller-interval='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-enabled='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-mutual-listener-port='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-queue-size='1024'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-rotated-identities-queue-size='1024'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-signal-backoff-duration='1s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-spiffe-trust-domain='spiffe.cilium'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-spire-admin-socket=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --metrics='+cilium_bpf_map_pressure'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mke-cgroup-mount=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --monitor-aggregation='medium'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --monitor-aggregation-flags='all'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --monitor-aggregation-interval='5s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --monitor-queue-size='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mtu='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-encryption-opt-out-labels='node-role.kubernetes.io/control-plane'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-port-acceleration='disabled'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-port-algorithm='random'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-port-bind-protection='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-port-mode='snat'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-port-range='30000,32767'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --nodes-gc-interval='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --operator-api-serve-addr='127.0.0.1:9234'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --policy-audit-mode='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --policy-queue-size='100'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --policy-trigger-interval='1s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --pprof='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --pprof-address='localhost'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --pprof-port='6060'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --preallocate-bpf-maps='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --prepend-iptables-chains='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --procfs='/host/proc'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --prometheus-serve-addr=':9090'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-connect-timeout='2'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-gid='1337'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-idle-timeout-seconds='60'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-max-connection-duration-seconds='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-max-requests-per-connection='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-prometheus-port='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-xff-num-trusted-hops-egress='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-xff-num-trusted-hops-ingress='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --read-cni-conf=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --remove-cilium-node-taints='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --restore='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --restored-proxy-ports-age-limit='15'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --route-metric='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --routing-mode='native'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --set-cilium-is-up-condition='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --set-cilium-node-taints='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --sidecar-istio-proxy-image='cilium/istio_proxy'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --single-cluster-route='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --skip-cnp-status-startup-clean='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --socket-path='/var/run/cilium/cilium.sock'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --srv6-encap-mode='reduced'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --state-dir='/var/run/cilium'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --synchronize-k8s-nodes='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-dns-reject-response-code='refused'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-enable-dns-compression='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-endpoint-max-ip-per-hostname='50'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-idle-connection-grace-period='0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-max-deferred-connection-deletes='10000'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-min-ttl='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-pre-cache=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-proxy-port='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-proxy-response-max-delay='100ms'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --trace-payloadlen='128'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --trace-sock='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tunnel=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tunnel-port='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tunnel-protocol='vxlan'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --unmanaged-pod-watcher-interval='15'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --use-cilium-internal-ip-for-ipsec='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --version='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --vlan-bpf-bypass=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --vtep-cidr=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --vtep-endpoint=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --vtep-mac=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --vtep-mask=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --wireguard-encapsulate='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --write-cni-conf-when-ready='/host/etc/cni/net.d/05-cilium.conflist'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" _ _ _" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" ___|_| |_|_ _ _____" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg="| _| | | | | | |" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg="|___|_|_|_|___|_|_|_|" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg="Cilium 1.14.18 5418622a22 2024-07-03T11:57:56+02:00 go version go1.22.10 linux/amd64" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg="clang (10.0.0) and kernel (6.1.0) versions: OK!" subsys=linux-datapath
time="2025-04-17T22:04:23Z" level=info msg="linking environment: OK!" subsys=linux-datapath
time="2025-04-17T22:04:23Z" level=info msg="Kernel config file not found: if the agent fails to start, check the system requirements at https://docs.cilium.io/en/stable/operations/system_requirements" subsys=probes
time="2025-04-17T22:04:23Z" level=info msg="Detected mounted BPF filesystem at /sys/fs/bpf" subsys=bpf
time="2025-04-17T22:04:23Z" level=info msg="Mounted cgroupv2 filesystem at /run/cilium/cgroupv2" subsys=cgroups
time="2025-04-17T22:04:23Z" level=info msg="Parsing base label prefixes from default label list" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg="Parsing additional label prefixes from user inputs: []" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg="Final label prefixes to be used for identity evaluation:" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - reserved:.*" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - :io\\.kubernetes\\.pod\\.namespace" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - :io\\.cilium\\.k8s\\.namespace\\.labels" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - :app\\.kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:io\\.kubernetes" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:.*beta\\.kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:k8s\\.io" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:pod-template-generation" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:pod-template-hash" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:controller-revision-hash" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:annotation.*" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:etcd_node" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration=1.427766ms function="pprof.init.func1 (cell.go:50)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="79.663µs" function="gops.registerGopsHooks (cell.go:38)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration=3.316235ms function="metrics.init.func1 (cell.go:11)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="6.643µs" function="metrics.init.func2 (cell.go:14)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Spire Delegate API Client is disabled as no socket path is configured" subsys=spire-delegate
time="2025-04-17T22:04:23Z" level=info msg="Mutual authentication handler is disabled as no port is configured" subsys=auth
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration=102.885505ms function="cmd.init.func3 (daemon_main.go:1638)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="45.592µs" function="bgpv1.init.func1 (cell.go:46)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="225.691µs" function="metrics.RegisterCollector (metrics.go:56)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="17.743µs" function="gc.registerSignalHandler (cell.go:47)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="22.921µs" function="utime.initUtimeSync (cell.go:29)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="179.62µs" function="agentliveness.newAgentLivenessUpdater (agent_liveness.go:43)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="81.921µs" function="l2responder.NewL2ResponderReconciler (l2responder.go:63)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="96.452µs" function="garp.newGARPProcessor (processor.go:27)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Starting subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Started gops server" address="127.0.0.1:9890" subsys=gops
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="580.162µs" function="gops.registerGopsHooks.func1 (cell.go:43)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="2.241µs" function="metrics.NewRegistry.func1 (registry.go:86)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Establishing connection to apiserver" host="https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com" subsys=k8s-client
time="2025-04-17T22:04:23Z" level=info msg="Serving prometheus metrics on :9090" subsys=metrics
time="2025-04-17T22:04:23Z" level=info msg="Connected to apiserver" subsys=k8s-client
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration=14.572699ms function="client.(*compositeClientset).onStart" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration=3.989335ms function="authmap.newAuthMap.func1 (cell.go:27)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="95.315µs" function="configmap.newMap.func1 (cell.go:23)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="64.329µs" function="signalmap.newMap.func1 (cell.go:44)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="154.459µs" function="nodemap.newNodeMap.func1 (cell.go:23)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="141.941µs" function="eventsmap.newEventsMap.func1 (cell.go:35)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="77.192µs" function="*cni.cniConfigManager.Start" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Generating CNI configuration file with mode portmap" subsys=cni-config
time="2025-04-17T22:04:23Z" level=info msg="Wrote CNI configuration file to /host/etc/cni/net.d/05-cilium.conflist" subsys=cni-config
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration=20.80947ms function="datapath.newDatapath.func1 (cells.go:113)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Restored 0 node IDs from the BPF map" subsys=linux-datapath
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="116.525µs" function="datapath.newDatapath.func2 (cells.go:126)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="25.986µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Node].Start" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="1.406µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumNode].Start" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Using autogenerated IPv4 allocation range" subsys=node v4Prefix=10.24.0.0/16
time="2025-04-17T22:04:23Z" level=info msg="no local ciliumnode found, will not restore cilium internal ips from k8s" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration=103.976855ms function="node.NewLocalNodeStore.func1 (local_node_store.go:76)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="10.944µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Service].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration=201.077354ms function="*manager.diffStore[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Service].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="5.827µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s.Endpoints].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Using discoveryv1.EndpointSlice" subsys=k8s
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration=100.341912ms function="*manager.diffStore[*github.com/cilium/cilium/pkg/k8s.Endpoints].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="7.664µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Pod].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="1.251µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Namespace].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="1.029µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumNetworkPolicy].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="1.374µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumClusterwideNetworkPolicy].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="2.234µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2alpha1.CiliumCIDRGroup].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="26.98µs" function="endpointmanager.newDefaultEndpointManager.func1 (cell.go:201)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="11.042µs" function="cmd.newPolicyTrifecta.func1 (policy.go:135)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="108.577µs" function="*manager.manager.Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Serving cilium node monitor v1.2 API at unix:///var/run/cilium/monitor1_2.sock" subsys=monitor-agent
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="277.892µs" function="agent.newMonitorAgent.func1 (cell.go:61)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="2.176µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2alpha1.CiliumL2AnnouncementPolicy].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="6.635µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Restoring proxy ports from file failed, falling back to restoring from iptables rules" error="stat /var/run/cilium/state/proxy_ports_state.json: no such file or directory" file-path=/var/run/cilium/state/proxy_ports_state.json subsys=proxy
time="2025-04-17T22:04:24Z" level=info msg="Envoy: Starting xDS gRPC server listening on /var/run/cilium/envoy/sockets/xds.sock" subsys=envoy-manager
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration=2.175944ms function="proxy.newProxy.func1 (cell.go:63)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="186.337µs" function="signal.provideSignalManager.func1 (cell.go:25)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Datapath signal listener running" subsys=signal
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration=1.242006ms function="auth.registerAuthManager.func1 (cell.go:109)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="27.245µs" function="auth.registerGCJobs.func1 (cell.go:158)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="10.722µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Using Managed Neighbor Kernel support" subsys=daemon
time="2025-04-17T22:04:24Z" level=warning msg="Deprecated value for --kube-proxy-replacement: partial (use either \"true\", or \"false\")" subsys=daemon
time="2025-04-17T22:04:24Z" level=info msg="Inheriting MTU from external network interface" device=eth1 ipAddr=10.108.0.3 mtu=1500 subsys=mtu
time="2025-04-17T22:04:24Z" level=info msg="Local boot ID is \"80a520c6-e29e-4fba-b83f-e22eb5be2068\"" subsys=node
time="2025-04-17T22:04:24Z" level=info msg="Removed map pin at /sys/fs/bpf/tc/globals/cilium_ipcache, recreating and re-pinning map cilium_ipcache" file-path=/sys/fs/bpf/tc/globals/cilium_ipcache name=cilium_ipcache subsys=bpf
time="2025-04-17T22:04:24Z" level=info msg="Restored services from maps" failedServices=0 restoredServices=0 subsys=service
time="2025-04-17T22:04:24Z" level=info msg="Restored backends from maps" failedBackends=0 restoredBackends=0 skippedBackends=0 subsys=service
time="2025-04-17T22:04:24Z" level=info msg="Reading old endpoints..." subsys=daemon
time="2025-04-17T22:04:24Z" level=info msg="No old endpoints found." subsys=daemon
time="2025-04-17T22:04:24Z" level=info msg="Waiting until all Cilium CRDs are available" subsys=k8s
time="2025-04-17T22:04:25Z" level=info msg="All Cilium CRDs have been found and are available" subsys=k8s
time="2025-04-17T22:04:25Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-65529 subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-65529 subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=warning msg="Unable to get node resource" error="ciliumnodes.cilium.io \"system-0-65529\" not found" subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=warning msg="Unable to get node resource" error="ciliumnodes.cilium.io \"system-0-65529\" not found" subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=info msg="Successfully created CiliumNode resource" subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=warning msg="Unable to create CiliumNode resource, will retry" error="ciliumnodes.cilium.io \"system-0-65529\" already exists" subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=info msg="Retrieved node information from cilium node" nodeName=system-0-65529 subsys=k8s
time="2025-04-17T22:04:25Z" level=info msg="Received own node information from API server" ipAddr.ipv4=10.108.0.3 ipAddr.ipv6="<nil>" k8sNodeIP=10.108.0.3 labels="map[beta.kubernetes.io/arch:amd64 beta.kubernetes.io/instance-type:s-2vcpu-4gb beta.kubernetes.io/os:linux doks.digitalocean.com/managed:true doks.digitalocean.com/node-id:b67bdbe9-bd78-4fe6-b6cf-afe9bb225ad0 doks.digitalocean.com/node-pool:system-0 doks.digitalocean.com/node-pool-id:73347348-171d-4091-a3c9-1d4b0945c964 doks.digitalocean.com/version:1.30.10-do.0 failure-domain.beta.kubernetes.io/region:nyc3 kubernetes.io/arch:amd64 kubernetes.io/hostname:system-0-65529 kubernetes.io/os:linux node.kubernetes.io/instance-type:s-2vcpu-4gb region:nyc3 topology.kubernetes.io/region:nyc3]" nodeName=system-0-65529 subsys=k8s v4Prefix=10.244.1.0/25 v6Prefix="<nil>"
time="2025-04-17T22:04:25Z" level=info msg="k8s mode: Allowing localhost to reach local endpoints" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Detected devices" devices="[]" subsys=linux-datapath
time="2025-04-17T22:04:25Z" level=info msg="Enabling k8s event listener" subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Removing stale endpoint interfaces" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Waiting until local node addressing before starting watchers depending on it" subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Skipping kvstore configuration" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Initializing node addressing" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Initializing cluster-pool IPAM" subsys=ipam v4Prefix=10.244.1.0/25 v6Prefix="<nil>"
time="2025-04-17T22:04:25Z" level=info msg="Restoring endpoints..." subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Endpoints restored" failed=0 restored=0 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Addressing information:" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Cluster-Name: default" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Cluster-ID: 0" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Local node-name: system-0-65529" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Node-IPv6: <nil>" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" External-Node IPv4: 10.108.0.3" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Internal-Node IPv4: 10.244.1.72" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" IPv4 allocation prefix: 10.244.1.0/25" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" IPv4 native routing prefix: 10.244.0.0/16" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Loopback IPv4: 169.254.42.1" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Local IPv4 addresses:" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" - 164.90.136.24" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" - 10.17.0.5" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" - 10.108.0.3" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" - 164.90.136.24" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Node updated" clusterName=default nodeName=system-0-65529 subsys=nodemanager
time="2025-04-17T22:04:25Z" level=info msg="Adding local node to cluster" node="{system-0-65529 default [{ExternalIP 164.90.136.24} {InternalIP 10.108.0.3} {CiliumInternalIP 10.244.1.72} {ExternalIP 164.90.136.24}] 10.244.1.0/25 [] <nil> [] 10.244.1.44 <nil> <nil> <nil> 0 local 0 map[beta.kubernetes.io/arch:amd64 beta.kubernetes.io/instance-type:s-2vcpu-4gb beta.kubernetes.io/os:linux doks.digitalocean.com/managed:true doks.digitalocean.com/node-id:b67bdbe9-bd78-4fe6-b6cf-afe9bb225ad0 doks.digitalocean.com/node-pool:system-0 doks.digitalocean.com/node-pool-id:73347348-171d-4091-a3c9-1d4b0945c964 doks.digitalocean.com/version:1.30.10-do.0 failure-domain.beta.kubernetes.io/region:nyc3 kubernetes.io/arch:amd64 kubernetes.io/hostname:system-0-65529 kubernetes.io/os:linux node.kubernetes.io/instance-type:s-2vcpu-4gb region:nyc3 topology.kubernetes.io/region:nyc3] map[] 1 80a520c6-e29e-4fba-b83f-e22eb5be2068}" subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=info msg="Node updated" clusterName=default nodeName=system-0-6tk3b subsys=nodemanager
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=2059c76d-348b-4c2b-a795-55f75c26b77c subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2059c76d-348b-4c2b-a795-55f75c26b77c policyRevision=2 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=c8b2e396-2466-40f7-b27e-e6cfc7fcf578 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c8b2e396-2466-40f7-b27e-e6cfc7fcf578 policyRevision=3 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=c3537988-b97d-4ec4-87ed-ea0a7d07dc11 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c3537988-b97d-4ec4-87ed-ea0a7d07dc11 policyRevision=4 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=5132c0a1-3303-464e-b45a-6eb3e1dde804 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=5132c0a1-3303-464e-b45a-6eb3e1dde804 policyRevision=5 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=536870ae-268e-4478-a339-778448ec116d subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=536870ae-268e-4478-a339-778448ec116d policyRevision=6 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=533deec1-93e5-491b-bb28-cf3d8e70c612 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=533deec1-93e5-491b-bb28-cf3d8e70c612 policyRevision=7 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=1520cbd5-1199-4059-8766-9cdb0fcece84 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1520cbd5-1199-4059-8766-9cdb0fcece84 policyRevision=8 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-65529 subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=info msg="Waiting until all pre-existing resources have been received" subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Node updated" clusterName=default nodeName=system-0-6tk3r subsys=nodemanager
time="2025-04-17T22:04:25Z" level=info msg="Annotating k8s node" subsys=daemon v4CiliumHostIP.IPv4=10.244.1.72 v4IngressIP.IPv4="<nil>" v4Prefix=10.244.1.0/25 v4healthIP.IPv4=10.244.1.44 v6CiliumHostIP.IPv6="<nil>" v6IngressIP.IPv6="<nil>" v6Prefix="<nil>" v6healthIP.IPv6="<nil>"
time="2025-04-17T22:04:25Z" level=info msg="Initializing identity allocator" subsys=identity-cache
time="2025-04-17T22:04:25Z" level=info msg="Allocating identities between range" cluster-id=0 max=65535 min=256 subsys=identity-cache
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.forwarding sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.rp_filter sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.accept_local sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.send_redirects sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.forwarding sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.rp_filter sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.accept_local sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.send_redirects sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.core.bpf_jit_enable sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.all.rp_filter sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.fib_multipath_use_neigh sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=kernel.unprivileged_bpf_disabled sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=kernel.timer_migration sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting up BPF datapath" bpfClockSource=ktime bpfInsnSet="<nil>" subsys=datapath-loader
time="2025-04-17T22:04:26Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-17T22:04:26Z" level=info msg="Iptables rules installed" subsys=iptables
time="2025-04-17T22:04:26Z" level=info msg="Adding new proxy port rules for cilium-dns-egress:40061" id=cilium-dns-egress subsys=proxy
time="2025-04-17T22:04:26Z" level=info msg="Iptables proxy rules installed" subsys=iptables
time="2025-04-17T22:04:26Z" level=info msg="Start hook executed" duration=2.222971128s function="cmd.newDaemonPromise.func1 (daemon_main.go:1694)" subsys=hive
time="2025-04-17T22:04:26Z" level=info msg="Start hook executed" duration="44.35µs" function="utime.initUtimeSync.func1 (cell.go:33)" subsys=hive
time="2025-04-17T22:04:26Z" level=info msg="Start hook executed" duration="7.549µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:04:26Z" level=info msg="Starting IP identity watcher" subsys=ipcache
time="2025-04-17T22:04:26Z" level=info msg="Initializing daemon" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Validating configured node address ranges" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Starting connection tracking garbage collector" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Initial scan of connection tracking completed" subsys=ct-gc
time="2025-04-17T22:04:26Z" level=info msg="Regenerating restored endpoints" numRestored=0 subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Creating host endpoint" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="New endpoint" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1715 ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Resolving identity labels (blocking)" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1715 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:doks.digitalocean.com/node-id=b67bdbe9-bd78-4fe6-b6cf-afe9bb225ad0,k8s:doks.digitalocean.com/node-pool-id=73347348-171d-4091-a3c9-1d4b0945c964,k8s:doks.digitalocean.com/node-pool=system-0,k8s:doks.digitalocean.com/version=1.30.10-do.0,k8s:node.kubernetes.io/instance-type=s-2vcpu-4gb,k8s:region=nyc3,k8s:topology.kubernetes.io/region=nyc3,reserved:host" ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Identity of endpoint changed" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1715 identity=1 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:doks.digitalocean.com/node-id=b67bdbe9-bd78-4fe6-b6cf-afe9bb225ad0,k8s:doks.digitalocean.com/node-pool-id=73347348-171d-4091-a3c9-1d4b0945c964,k8s:doks.digitalocean.com/node-pool=system-0,k8s:doks.digitalocean.com/version=1.30.10-do.0,k8s:node.kubernetes.io/instance-type=s-2vcpu-4gb,k8s:region=nyc3,k8s:topology.kubernetes.io/region=nyc3,reserved:host" ipv4= ipv6= k8sPodName=/ oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Launching Cilium health daemon" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Launching Cilium health endpoint" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Started healthz status API server" address="127.0.0.1:9879" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Processing queued endpoint deletion requests from /var/run/cilium/deleteQueue" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="processing 0 queued deletion requests" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Initializing Cilium API" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Start hook executed" duration="173.706µs" function="l2respondermap.newMap.func1 (l2_responder_map4.go:44)" subsys=hive
time="2025-04-17T22:04:26Z" level=info msg="Start hook executed" duration="6.971µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:04:26Z" level=info msg="Finished regenerating restored endpoints" regenerated=0 subsys=daemon total=0
time="2025-04-17T22:04:26Z" level=info msg="Deleted orphan backends" orphanBackends=0 subsys=service
time="2025-04-17T22:04:26Z" level=info msg="Removed stale bpf map" file-path=/sys/fs/bpf/tc/globals/cilium_lb4_source_range subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Cleaning up Cilium health endpoint" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Daemon initialization completed" bootstrapTime=3.32915262s subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Configuring Hubble server" eventQueueSize=2048 maxFlows=4095 subsys=hubble
time="2025-04-17T22:04:26Z" level=info msg="Starting local Hubble server" address="unix:///var/run/cilium/hubble.sock" subsys=hubble
time="2025-04-17T22:04:26Z" level=info msg="Beginning to read perf buffer" startTime="2025-04-17 22:04:26.488904612 +0000 UTC m=+3.389865571" subsys=monitor-agent
time="2025-04-17T22:04:26Z" level=info msg="Starting Hubble server" address=":4244" subsys=hubble
time="2025-04-17T22:04:26Z" level=info msg="Serving cilium API at unix:///var/run/cilium/cilium.sock" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Create endpoint request" addressing="&{10.244.1.28 51e14210-aae1-4e0b-85b3-8db00ab678ed default }" containerID=e5ccc336169ffbc10803b8ffd55d2c82edf18dfd761be28cb26489b2ff8e2d9e datapathConfiguration="&{false false false false false <nil>}" interface=lxc44b50984441e k8sPodName=kube-system/konnectivity-agent-qrsjc labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:26Z" level=info msg="New endpoint" containerID=e5ccc33616 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=541 ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Resolving identity labels (blocking)" containerID=e5ccc33616 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=541 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=konnectivity-agent" ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Reusing existing global key" key="k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=konnectivity-agent;" subsys=allocator
time="2025-04-17T22:04:26Z" level=info msg="Identity of endpoint changed" containerID=e5ccc33616 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=541 identity=32430 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=konnectivity-agent" ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Waiting for endpoint to be generated" containerID=e5ccc33616 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=541 identity=32430 ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Compiled new BPF template" BPFCompilationTime=448.864037ms file-path=/var/run/cilium/state/templates/38947ca1b98bdc798c90e04502afdc182aef39dd080a3b23edb6bffd04e607ac/bpf_host.o subsys=datapath-loader
time="2025-04-17T22:04:26Z" level=info msg="Rewrote endpoint BPF program" containerID= datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=1715 identity=1 ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:27Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-17T22:04:27Z" level=info msg="New endpoint" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=162 ipv4=10.244.1.44 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:27Z" level=info msg="Resolving identity labels (blocking)" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=162 identityLabels="reserved:health" ipv4=10.244.1.44 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:27Z" level=info msg="Identity of endpoint changed" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=162 identity=4 identityLabels="reserved:health" ipv4=10.244.1.44 ipv6= k8sPodName=/ oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:28Z" level=info msg="Compiled new BPF template" BPFCompilationTime=1.599029024s file-path=/var/run/cilium/state/templates/756338fe133d4c92642f65cd7b1cfff69481ecf0ee39d710b520c903ca69d266/bpf_lxc.o subsys=datapath-loader
time="2025-04-17T22:04:28Z" level=info msg="Rewrote endpoint BPF program" containerID= datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=162 identity=4 ipv4=10.244.1.44 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:28Z" level=info msg="Rewrote endpoint BPF program" containerID=e5ccc33616 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=541 identity=32430 ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc subsys=endpoint
time="2025-04-17T22:04:28Z" level=info msg="Successful endpoint creation" containerID=e5ccc33616 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=541 identity=32430 ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc subsys=daemon
time="2025-04-17T22:04:28Z" level=info msg="Serving cilium health API at unix:///var/run/cilium/health.sock" subsys=health-server
time="2025-04-17T22:04:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.42 237daac6-3b67-41fb-978e-b0d6b66588b3 default }" containerID=0a8850748733ff6c28587ecdf65266d4741a301b49375a62bf9c7435239be287 datapathConfiguration="&{false false false false false <nil>}" interface=lxc42d2980c1128 k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:40Z" level=info msg="New endpoint" containerID=0a88507487 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3729 ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Resolving identity labels (blocking)" containerID=0a88507487 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3729 identityLabels="k8s:app.kubernetes.io/name=argocd-notifications-controller,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-notifications-controller,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-notifications-controller;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-notifications-controller;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:40Z" level=info msg="Identity of endpoint changed" containerID=0a88507487 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3729 identity=23481 identityLabels="k8s:app.kubernetes.io/name=argocd-notifications-controller,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-notifications-controller,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Waiting for endpoint to be generated" containerID=0a88507487 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3729 identity=23481 ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.55 9301b5a0-d149-46a6-af14-78430687825d default }" containerID=95d7216a41490e02f87fb2e992471ce0cf75b78dc85e380fd90171f4bc9bf6d0 datapathConfiguration="&{false false false false false <nil>}" interface=lxc2366a1a5d536 k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:40Z" level=info msg="New endpoint" containerID=95d7216a41 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1525 ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Resolving identity labels (blocking)" containerID=95d7216a41 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1525 identityLabels="k8s:app.kubernetes.io/name=argocd-dex-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-dex-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-dex-server;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-dex-server;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:40Z" level=info msg="Identity of endpoint changed" containerID=95d7216a41 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1525 identity=63450 identityLabels="k8s:app.kubernetes.io/name=argocd-dex-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-dex-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Waiting for endpoint to be generated" containerID=95d7216a41 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1525 identity=63450 ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.82 2669e1f1-246e-42e3-9135-c44d5a4f97b9 default }" containerID=dc5a0589c22c957a7550e064268f256ef73f8cf3f3f7f153257d553d528e10ba datapathConfiguration="&{false false false false false <nil>}" interface=lxcfc40e706c1f1 k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:40Z" level=info msg="New endpoint" containerID=dc5a0589c2 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4061 ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Resolving identity labels (blocking)" containerID=dc5a0589c2 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4061 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/instance=kubeintel;k8s:app.kubernetes.io/name=kubeintel;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=kubeintel;k8s:io.kubernetes.pod.namespace=kubeintel;" subsys=allocator
time="2025-04-17T22:04:40Z" level=info msg="Identity of endpoint changed" containerID=dc5a0589c2 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4061 identity=23569 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Waiting for endpoint to be generated" containerID=dc5a0589c2 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4061 identity=23569 ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Rewrote endpoint BPF program" containerID=0a88507487 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=3729 identity=23481 ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Successful endpoint creation" containerID=0a88507487 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=3729 identity=23481 ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql subsys=daemon
time="2025-04-17T22:04:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.70 a75f583a-6ecc-4514-bd1a-f3cb9811aa59 default }" containerID=171e582722ff78cbb3e2803d9c64f86db914531dd41a8674d807a8730ff2add7 datapathConfiguration="&{false false false false false <nil>}" interface=lxcddc148ee97ea k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:40Z" level=info msg="New endpoint" containerID=171e582722 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2840 ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Resolving identity labels (blocking)" containerID=171e582722 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2840 identityLabels="k8s:app.kubernetes.io/name=argocd-applicationset-controller,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-applicationset-controller,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-applicationset-controller;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-applicationset-controller;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:40Z" level=info msg="Identity of endpoint changed" containerID=171e582722 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2840 identity=57071 identityLabels="k8s:app.kubernetes.io/name=argocd-applicationset-controller,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-applicationset-controller,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Waiting for endpoint to be generated" containerID=171e582722 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2840 identity=57071 ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Rewrote endpoint BPF program" containerID=dc5a0589c2 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=4061 identity=23569 ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Successful endpoint creation" containerID=dc5a0589c2 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=4061 identity=23569 ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 subsys=daemon
time="2025-04-17T22:04:40Z" level=info msg="Rewrote endpoint BPF program" containerID=95d7216a41 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=1525 identity=63450 ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.87 b376e6a5-2bc5-4707-a20f-45f607f52a0d default }" containerID=ecf5a88435a9208312b1bbf92356372420f9d0a9364a68f921674f8d1da6c206 datapathConfiguration="&{false false false false false <nil>}" interface=lxc52472155addf k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:40Z" level=info msg="New endpoint" containerID=ecf5a88435 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4089 ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Successful endpoint creation" containerID=95d7216a41 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=1525 identity=63450 ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 subsys=daemon
time="2025-04-17T22:04:40Z" level=info msg="Resolving identity labels (blocking)" containerID=ecf5a88435 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4089 identityLabels="k8s:app.kubernetes.io/name=argocd-redis,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-redis,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-redis;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-redis;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:40Z" level=info msg="Identity of endpoint changed" containerID=ecf5a88435 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4089 identity=3754 identityLabels="k8s:app.kubernetes.io/name=argocd-redis,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-redis,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Waiting for endpoint to be generated" containerID=ecf5a88435 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4089 identity=3754 ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=171e582722 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=2840 identity=57071 ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=171e582722 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=2840 identity=57071 ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=ecf5a88435 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=4089 identity=3754 ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=ecf5a88435 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=4089 identity=3754 ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.123 643c8582-352f-4555-85d1-3c520372aa40 default }" containerID=f3e05d1c5d95cce83c74785bb93c8dfb6e46d9960f716a195793daffe129bd4c datapathConfiguration="&{false false false false false <nil>}" interface=lxcf06a3fb206e6 k8sPodName=kube-system/coredns-854895db77-p9hbd labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:41Z" level=info msg="New endpoint" containerID=f3e05d1c5d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1599 ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Resolving identity labels (blocking)" containerID=f3e05d1c5d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1599 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Reusing existing global key" key="k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=coredns;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=kube-dns;" subsys=allocator
time="2025-04-17T22:04:41Z" level=info msg="Identity of endpoint changed" containerID=f3e05d1c5d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1599 identity=30020 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Waiting for endpoint to be generated" containerID=f3e05d1c5d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1599 identity=30020 ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.12 75d8c3dd-78e6-47fc-b485-fce5f2057cd2 default }" containerID=cbc1505e67f1c58a570cbf0c3266fca29a6e7b3edc9184a2316e3111e2c76d77 datapathConfiguration="&{false false false false false <nil>}" interface=lxc50c1d4ac5f15 k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:41Z" level=info msg="New endpoint" containerID=cbc1505e67 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3225 ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Resolving identity labels (blocking)" containerID=cbc1505e67 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3225 identityLabels="k8s:app.kubernetes.io/component=controller,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=cert-manager,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=cert-manager,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Reusing existing global key" key="k8s:app=cert-manager;k8s:app.kubernetes.io/component=controller;k8s:app.kubernetes.io/instance=cert-manager;k8s:app.kubernetes.io/name=cert-manager;k8s:app.kubernetes.io/version=v1.15.1;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=cert-manager;k8s:io.kubernetes.pod.namespace=cert-manager;" subsys=allocator
time="2025-04-17T22:04:41Z" level=info msg="Identity of endpoint changed" containerID=cbc1505e67 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3225 identity=509 identityLabels="k8s:app.kubernetes.io/component=controller,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=cert-manager,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=cert-manager,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Waiting for endpoint to be generated" containerID=cbc1505e67 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3225 identity=509 ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=f3e05d1c5d datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=1599 identity=30020 ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=f3e05d1c5d datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=1599 identity=30020 ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=cbc1505e67 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=3225 identity=509 ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=cbc1505e67 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=3225 identity=509 ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.6 2e88355d-526b-46d0-ae5c-463fdcdb24ba default }" containerID=4b4a5b96722a7cf5999e2ee9368e8d93c0649a59c45a50faf1733bd6aa69c19a datapathConfiguration="&{false false false false false <nil>}" interface=lxc424bd7f452fa k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:41Z" level=info msg="New endpoint" containerID=4b4a5b9672 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3208 ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Resolving identity labels (blocking)" containerID=4b4a5b9672 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3208 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Reusing existing global key" key="k8s:app=nginx;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=demo;" subsys=allocator
time="2025-04-17T22:04:41Z" level=info msg="Identity of endpoint changed" containerID=4b4a5b9672 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3208 identity=60447 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Waiting for endpoint to be generated" containerID=4b4a5b9672 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3208 identity=60447 ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=4b4a5b9672 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=3208 identity=60447 ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=4b4a5b9672 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=3208 identity=60447 ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.125 de79be76-1a86-4e99-bda7-0df4ac52e8cc default }" containerID=d4eda2588c9aae6fb4d96f15064135d9b91e353e28c16d4de411eb58e27e64c3 datapathConfiguration="&{false false false false false <nil>}" interface=lxcd94c723e0e8f k8sPodName=argocd/argocd-server-6f9745ff7-kj46x labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:41Z" level=info msg="New endpoint" containerID=d4eda2588c datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=849 ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Resolving identity labels (blocking)" containerID=d4eda2588c datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=849 identityLabels="k8s:app.kubernetes.io/name=argocd-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-server;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-server;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:41Z" level=info msg="Identity of endpoint changed" containerID=d4eda2588c datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=849 identity=1930 identityLabels="k8s:app.kubernetes.io/name=argocd-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Waiting for endpoint to be generated" containerID=d4eda2588c datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=849 identity=1930 ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=d4eda2588c datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=849 identity=1930 ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=d4eda2588c datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=849 identity=1930 ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.112 7ca0b46e-3484-49a2-ad4c-df150f7624ba default }" containerID=bf96af1bf079817ca7f1de6393ec7cede5086ad8ebebb76487d6aeac5f53599a datapathConfiguration="&{false false false false false <nil>}" interface=lxce8d72ff3b26a k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:41Z" level=info msg="New endpoint" containerID=bf96af1bf0 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=561 ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Resolving identity labels (blocking)" containerID=bf96af1bf0 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=561 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/instance=bikexbike;k8s:app.kubernetes.io/name=bikexbike;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=bikexbike;" subsys=allocator
time="2025-04-17T22:04:41Z" level=info msg="Identity of endpoint changed" containerID=bf96af1bf0 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=561 identity=29331 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Waiting for endpoint to be generated" containerID=bf96af1bf0 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=561 identity=29331 ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Rewrote endpoint BPF program" containerID=bf96af1bf0 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=561 identity=29331 ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Successful endpoint creation" containerID=bf96af1bf0 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=561 identity=29331 ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv subsys=daemon
time="2025-04-17T22:04:42Z" level=info msg="Create endpoint request" addressing="&{10.244.1.59 6f9a5746-68c6-4921-84e4-50622b780e18 default }" containerID=20659f8a6d46fde4e0f82429e970ee21a032f6289ddfe65c318c2240110adc11 datapathConfiguration="&{false false false false false <nil>}" interface=lxc548fd48d3b44 k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:42Z" level=info msg="New endpoint" containerID=20659f8a6d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3850 ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Resolving identity labels (blocking)" containerID=20659f8a6d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3850 identityLabels="k8s:app.kubernetes.io/name=hubble-ui,k8s:app.kubernetes.io/part-of=cilium,k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=hubble-ui,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=hubble-ui" ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=hubble-ui;k8s:app.kubernetes.io/part-of=cilium;k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=hubble-ui;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=hubble-ui;" subsys=allocator
time="2025-04-17T22:04:42Z" level=info msg="Identity of endpoint changed" containerID=20659f8a6d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3850 identity=1382 identityLabels="k8s:app.kubernetes.io/name=hubble-ui,k8s:app.kubernetes.io/part-of=cilium,k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=hubble-ui,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=hubble-ui" ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Waiting for endpoint to be generated" containerID=20659f8a6d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3850 identity=1382 ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Rewrote endpoint BPF program" containerID=20659f8a6d datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=3850 identity=1382 ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Successful endpoint creation" containerID=20659f8a6d datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=3850 identity=1382 ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 subsys=daemon
time="2025-04-17T22:04:42Z" level=info msg="Create endpoint request" addressing="&{10.244.1.45 9c1b71fc-815d-47ec-9f92-96cbfea24d27 default }" containerID=47fe591a371cb0c903a8cab3b3db2f4f3e06eae370f84dbcb9ad98a9a50a71aa datapathConfiguration="&{false false false false false <nil>}" interface=lxc204d68d7fe54 k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:42Z" level=info msg="New endpoint" containerID=47fe591a37 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=276 ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Resolving identity labels (blocking)" containerID=47fe591a37 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=276 identityLabels="k8s:app.kubernetes.io/component=cainjector,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=cainjector,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=cainjector,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-cainjector,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Reusing existing global key" key="k8s:app=cainjector;k8s:app.kubernetes.io/component=cainjector;k8s:app.kubernetes.io/instance=cert-manager;k8s:app.kubernetes.io/name=cainjector;k8s:app.kubernetes.io/version=v1.15.1;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-cainjector;k8s:io.kubernetes.pod.namespace=cert-manager;" subsys=allocator
time="2025-04-17T22:04:42Z" level=info msg="Identity of endpoint changed" containerID=47fe591a37 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=276 identity=2984 identityLabels="k8s:app.kubernetes.io/component=cainjector,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=cainjector,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=cainjector,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-cainjector,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Waiting for endpoint to be generated" containerID=47fe591a37 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=276 identity=2984 ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Rewrote endpoint BPF program" containerID=47fe591a37 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=276 identity=2984 ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Successful endpoint creation" containerID=47fe591a37 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=276 identity=2984 ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk subsys=daemon
time="2025-04-17T22:04:43Z" level=info msg="Create endpoint request" addressing="&{10.244.1.120 bded704d-05c4-4050-a5ee-0eed5c6120c4 default }" containerID=e46fee48dd0c6a1ea84f75f089b5fe611b89fa29aca227edd6413a23c006512e datapathConfiguration="&{false false false false false <nil>}" interface=lxcee0fb6be79a7 k8sPodName=argocd/argocd-application-controller-0 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:43Z" level=info msg="New endpoint" containerID=e46fee48dd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=497 ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Resolving identity labels (blocking)" containerID=e46fee48dd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=497 identityLabels="k8s:app.kubernetes.io/name=argocd-application-controller,k8s:apps.kubernetes.io/pod-index=0,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-application-controller,k8s:io.kubernetes.pod.namespace=argocd,k8s:statefulset.kubernetes.io/pod-name=argocd-application-controller-0" ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-application-controller;k8s:apps.kubernetes.io/pod-index=0;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-application-controller;k8s:io.kubernetes.pod.namespace=argocd;k8s:statefulset.kubernetes.io/pod-name=argocd-application-controller-0;" subsys=allocator
time="2025-04-17T22:04:43Z" level=info msg="Identity of endpoint changed" containerID=e46fee48dd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=497 identity=15523 identityLabels="k8s:app.kubernetes.io/name=argocd-application-controller,k8s:apps.kubernetes.io/pod-index=0,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-application-controller,k8s:io.kubernetes.pod.namespace=argocd,k8s:statefulset.kubernetes.io/pod-name=argocd-application-controller-0" ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Waiting for endpoint to be generated" containerID=e46fee48dd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=497 identity=15523 ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Rewrote endpoint BPF program" containerID=e46fee48dd datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=497 identity=15523 ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Successful endpoint creation" containerID=e46fee48dd datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=497 identity=15523 ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 subsys=daemon
time="2025-04-17T22:04:43Z" level=info msg="Create endpoint request" addressing="&{10.244.1.26 be15d53a-60ce-46bf-92c2-6bc683f760bd default }" containerID=575c2bfcdf037c4c311013c985613f9bb2ce3407df56e4707ad44a86637b6364 datapathConfiguration="&{false false false false false <nil>}" interface=lxc7925e256ff1d k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:43Z" level=info msg="New endpoint" containerID=575c2bfcdf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=70 ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Resolving identity labels (blocking)" containerID=575c2bfcdf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=70 identityLabels="k8s:app.kubernetes.io/name=argocd-repo-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-repo-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-repo-server;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-repo-server;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:43Z" level=info msg="Identity of endpoint changed" containerID=575c2bfcdf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=70 identity=10654 identityLabels="k8s:app.kubernetes.io/name=argocd-repo-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-repo-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Waiting for endpoint to be generated" containerID=575c2bfcdf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=70 identity=10654 ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Rewrote endpoint BPF program" containerID=575c2bfcdf datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=70 identity=10654 ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Successful endpoint creation" containerID=575c2bfcdf datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=70 identity=10654 ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc subsys=daemon
time="2025-04-17T22:04:43Z" level=info msg="Create endpoint request" addressing="&{10.244.1.43 612ba327-6579-40cf-97fc-62783dee8bf9 default }" containerID=af7c770ec426f73a5ffacb2513ead6ed472d9279323bf4cde554c7e6beb8af8c datapathConfiguration="&{false false false false false <nil>}" interface=lxc7a60b9024355 k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:43Z" level=info msg="New endpoint" containerID=af7c770ec4 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=247 ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Resolving identity labels (blocking)" containerID=af7c770ec4 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=247 identityLabels="k8s:app.kubernetes.io/name=hubble-relay,k8s:app.kubernetes.io/part-of=cilium,k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=hubble-relay,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=hubble-relay" ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=hubble-relay;k8s:app.kubernetes.io/part-of=cilium;k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=hubble-relay;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=hubble-relay;" subsys=allocator
time="2025-04-17T22:04:43Z" level=info msg="Identity of endpoint changed" containerID=af7c770ec4 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=247 identity=14492 identityLabels="k8s:app.kubernetes.io/name=hubble-relay,k8s:app.kubernetes.io/part-of=cilium,k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=hubble-relay,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=hubble-relay" ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Waiting for endpoint to be generated" containerID=af7c770ec4 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=247 identity=14492 ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Rewrote endpoint BPF program" containerID=af7c770ec4 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=247 identity=14492 ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Successful endpoint creation" containerID=af7c770ec4 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=247 identity=14492 ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl subsys=daemon
time="2025-04-17T22:04:44Z" level=info msg="Create endpoint request" addressing="&{10.244.1.73 9aecd771-469c-4014-8093-84a9b9cc6e0b default }" containerID=fba121b519e4fb399e8822d635ebb01052ca3160a39c20ecb9b8c5e1fbc71e4e datapathConfiguration="&{false false false false false <nil>}" interface=lxcc0bf1d2fa535 k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:44Z" level=info msg="New endpoint" containerID=fba121b519 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1948 ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Resolving identity labels (blocking)" containerID=fba121b519 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1948 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Identity of endpoint changed" containerID=fba121b519 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1948 identity=29331 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Waiting for endpoint to be generated" containerID=fba121b519 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1948 identity=29331 ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Rewrote endpoint BPF program" containerID=fba121b519 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=1948 identity=29331 ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Successful endpoint creation" containerID=fba121b519 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=1948 identity=29331 ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=daemon
time="2025-04-17T22:04:44Z" level=info msg="Create endpoint request" addressing="&{10.244.1.27 33e3f78f-9e7e-4e54-895f-1817802961b3 default }" containerID=468cd28bdd721e4712b2cd99a4d2a6f2620eb2814ef959f54807defc45703877 datapathConfiguration="&{false false false false false <nil>}" interface=lxcc810e996b982 k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:44Z" level=info msg="New endpoint" containerID=468cd28bdd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2983 ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Resolving identity labels (blocking)" containerID=468cd28bdd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2983 identityLabels="k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=metrics-server,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=metrics-server" ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Reusing existing global key" key="k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=metrics-server;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=metrics-server;" subsys=allocator
time="2025-04-17T22:04:44Z" level=info msg="Identity of endpoint changed" containerID=468cd28bdd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2983 identity=49023 identityLabels="k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=metrics-server,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=metrics-server" ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Waiting for endpoint to be generated" containerID=468cd28bdd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2983 identity=49023 ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Rewrote endpoint BPF program" containerID=468cd28bdd datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=2983 identity=49023 ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Successful endpoint creation" containerID=468cd28bdd datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=2983 identity=49023 ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr subsys=daemon
time="2025-04-17T22:04:44Z" level=info msg="Create endpoint request" addressing="&{10.244.1.46 fc10a50f-7f70-40d7-b3f1-1015ccd19000 default }" containerID=5a2994da5aec4e2b9a49fac6c3f25e2fe46407d2f89711dce6341f791db2ac42 datapathConfiguration="&{false false false false false <nil>}" interface=lxc89bdec34ed14 k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:44Z" level=info msg="New endpoint" containerID=5a2994da5a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=104 ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Resolving identity labels (blocking)" containerID=5a2994da5a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=104 identityLabels="k8s:app.kubernetes.io/component=webhook,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=webhook,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=webhook,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-webhook,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Reusing existing global key" key="k8s:app=webhook;k8s:app.kubernetes.io/component=webhook;k8s:app.kubernetes.io/instance=cert-manager;k8s:app.kubernetes.io/name=webhook;k8s:app.kubernetes.io/version=v1.15.1;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-webhook;k8s:io.kubernetes.pod.namespace=cert-manager;" subsys=allocator
time="2025-04-17T22:04:44Z" level=info msg="Identity of endpoint changed" containerID=5a2994da5a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=104 identity=64008 identityLabels="k8s:app.kubernetes.io/component=webhook,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=webhook,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=webhook,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-webhook,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Waiting for endpoint to be generated" containerID=5a2994da5a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=104 identity=64008 ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Rewrote endpoint BPF program" containerID=5a2994da5a datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=104 identity=64008 ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Successful endpoint creation" containerID=5a2994da5a datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=104 identity=64008 ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl subsys=daemon
time="2025-04-17T22:04:45Z" level=info msg="Create endpoint request" addressing="&{10.244.1.56 48e794a8-1e5d-4442-b945-55ba359483a6 default }" containerID=91ef5fafe63d82f38dd626c75d7124fa985e4255cd589661c53c8e6852f03648 datapathConfiguration="&{false false false false false <nil>}" interface=lxc1100eb0f0490 k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:45Z" level=info msg="New endpoint" containerID=91ef5fafe6 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2948 ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Resolving identity labels (blocking)" containerID=91ef5fafe6 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2948 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Identity of endpoint changed" containerID=91ef5fafe6 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2948 identity=23569 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Waiting for endpoint to be generated" containerID=91ef5fafe6 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2948 identity=23569 ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Rewrote endpoint BPF program" containerID=91ef5fafe6 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=2948 identity=23569 ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Successful endpoint creation" containerID=91ef5fafe6 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=2948 identity=23569 ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=daemon
time="2025-04-17T22:05:15Z" level=info msg="Node updated" clusterName=default nodeName=system-0-655pn subsys=nodemanager
time="2025-04-17T22:05:15Z" level=info msg="Node updated" clusterName=default nodeName=system-0-655pn subsys=nodemanager
time="2025-04-17T22:05:16Z" level=info msg="Node updated" clusterName=default nodeName=system-0-655pn subsys=nodemanager
time="2025-04-17T22:06:16Z" level=info msg="Node deleted" clusterName=default nodeName=system-0-6tk3b subsys=nodemanager
time="2025-04-17T22:06:56Z" level=info msg="Node deleted" clusterName=default nodeName=system-0-6tk3r subsys=nodemanager
time="2025-04-17T22:09:26Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.00528717041015625 newInterval=7m30s subsys=map-ct
time="2025-04-17T22:16:56Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.01020050048828125 newInterval=11m15s subsys=map-ct
time="2025-04-17T22:28:11Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.01514434814453125 newInterval=16m53s subsys=map-ct
time="2025-04-17T22:45:04Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.02292633056640625 newInterval=25m20s subsys=map-ct
time="2025-04-17T23:10:24Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.0340576171875 newInterval=38m0s subsys=map-ct
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=6e6a1ffb-258f-4b35-b15a-596f02831268 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6e6a1ffb-258f-4b35-b15a-596f02831268 policyRevision=10 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=e04f7347-843d-445c-8900-7f47a79ce241 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=e04f7347-843d-445c-8900-7f47a79ce241 policyRevision=12 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=52caf630-c483-4769-846b-21ad91515baa subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=52caf630-c483-4769-846b-21ad91515baa policyRevision=14 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=3ffb3674-0687-493d-8026-84e6bcb16bb9 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=3ffb3674-0687-493d-8026-84e6bcb16bb9 policyRevision=16 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=2eb75a51-e804-4d38-a1e4-ee6c0a6cb604 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2eb75a51-e804-4d38-a1e4-ee6c0a6cb604 policyRevision=18 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=304e2e2d-20e2-41c5-8a93-2ba8656df5f6 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=304e2e2d-20e2-41c5-8a93-2ba8656df5f6 policyRevision=20 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=95c15523-625d-4201-9836-ed68eb9951ac subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=95c15523-625d-4201-9836-ed68eb9951ac policyRevision=22 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=0fc9d590-7cd1-4e65-9902-367afa656198 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=0fc9d590-7cd1-4e65-9902-367afa656198 policyRevision=24 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=5464360e-ad6a-44c9-be09-9b2991b4d9c2 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=5464360e-ad6a-44c9-be09-9b2991b4d9c2 policyRevision=26 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=667e5eb3-be94-47d6-b903-884428c4ebe8 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=667e5eb3-be94-47d6-b903-884428c4ebe8 policyRevision=28 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=d8c0ae6b-c490-41d2-8d63-7b19da985eb0 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d8c0ae6b-c490-41d2-8d63-7b19da985eb0 policyRevision=30 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=302e1ed5-4fdc-49b3-a59f-ca51d19b0f54 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=302e1ed5-4fdc-49b3-a59f-ca51d19b0f54 policyRevision=32 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=4f9abf9f-75d2-4e20-bb33-e65a7060608c subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=4f9abf9f-75d2-4e20-bb33-e65a7060608c policyRevision=34 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=9686038c-8750-489a-9c43-bbefcf218bb9 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=9686038c-8750-489a-9c43-bbefcf218bb9 policyRevision=36 subsys=daemon
time="2025-04-26T18:05:51Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-26T18:05:51Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-26T18:06:52Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-26T18:06:52Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-26T18:27:31Z" level=warning msg="service not found" k8sNamespace=ingress-nginx k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-26T18:27:31Z" level=warning msg="service not found" k8sNamespace=ingress-nginx k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-27T20:18:38Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-27T20:18:59Z" level=warning msg="service not found" k8sNamespace=kubeintel k8sSvcName=cm-acme-http-solver-cmhbq obj="10.245.224.188:8089/ANY" subsys=k8s-watcher
time="2025-04-27T20:51:16Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=241c0125-ecf7-45bf-9bae-fb1050afe25c subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=241c0125-ecf7-45bf-9bae-fb1050afe25c policyRevision=38 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=1e0ee758-9abc-498c-a11d-7c6c6f27b04c subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1e0ee758-9abc-498c-a11d-7c6c6f27b04c policyRevision=40 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=78f9e5b7-c0e9-494f-b201-a50cf77e8ba3 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=78f9e5b7-c0e9-494f-b201-a50cf77e8ba3 policyRevision=42 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=d0521085-9936-4b1b-b240-9857b87d6842 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d0521085-9936-4b1b-b240-9857b87d6842 policyRevision=44 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=58cd1899-b3ac-449c-a055-9752928c0114 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=58cd1899-b3ac-449c-a055-9752928c0114 policyRevision=46 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=968a96f0-7193-45c8-9cc9-1b6c47cd1a9e subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=968a96f0-7193-45c8-9cc9-1b6c47cd1a9e policyRevision=48 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=f0d2eca0-d987-4069-acce-fc1b7b29b39a subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=f0d2eca0-d987-4069-acce-fc1b7b29b39a policyRevision=50 subsys=daemon
time="2025-05-05T00:12:31Z" level=info msg="Delete endpoint request" containerID=91ef5fafe6 endpointID=2948 k8sNamespace=kubeintel k8sPodName=kubeintel-55955b8c46-bbjbh subsys=daemon
time="2025-05-05T00:12:31Z" level=info msg="Releasing key" key="[k8s:app.kubernetes.io/instance=kubeintel k8s:app.kubernetes.io/name=kubeintel k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=kubeintel k8s:io.kubernetes.pod.namespace=kubeintel]" subsys=allocator
time="2025-05-05T00:12:31Z" level=info msg="Removed endpoint" containerID=91ef5fafe6 datapathPolicyRevision=50 desiredPolicyRevision=36 endpointID=2948 identity=23569 ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=endpoint
time="2025-05-05T00:19:08Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.04230499267578125 newInterval=57m0s subsys=map-ct
time="2025-05-06T03:54:10Z" level=info msg="Delete endpoint request" containerID=fba121b519 endpointID=1948 k8sNamespace=bikexbike k8sPodName=bikexbike-588cddd898-9dz72 subsys=daemon
time="2025-05-06T03:54:10Z" level=info msg="Releasing key" key="[k8s:app.kubernetes.io/instance=bikexbike k8s:app.kubernetes.io/name=bikexbike k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=bikexbike]" subsys=allocator
time="2025-05-06T03:54:10Z" level=info msg="Removed endpoint" containerID=fba121b519 datapathPolicyRevision=50 desiredPolicyRevision=36 endpointID=1948 identity=29331 ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Delete endpoint request" containerID=4b4a5b9672 endpointID=3208 k8sNamespace=demo k8sPodName=nginx-deployment-86dcfdf4c6-zg6vm subsys=daemon
time="2025-05-08T21:44:42Z" level=info msg="Releasing key" key="[k8s:app=nginx k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=demo]" subsys=allocator
time="2025-05-08T21:44:42Z" level=info msg="Removed endpoint" containerID=4b4a5b9672 datapathPolicyRevision=50 desiredPolicyRevision=36 endpointID=3208 identity=60447 ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Create endpoint request" addressing="&{10.244.1.15 18b4e1eb-4ff7-42ca-8932-a5db78dd3841 default }" containerID=600ed6ccf165cdd8257b93e509ee54c2045c4e16c82f2540a56530fe199e8e1a datapathConfiguration="&{false false false false false <nil>}" interface=lxc59ef080483d6 k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q labels="[]" subsys=daemon sync-build=true
time="2025-05-08T21:44:42Z" level=info msg="New endpoint" containerID=600ed6ccf1 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3879 ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Resolving identity labels (blocking)" containerID=600ed6ccf1 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3879 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Reusing existing global key" key="k8s:app=nginx;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=demo;" subsys=allocator
time="2025-05-08T21:44:42Z" level=info msg="Identity of endpoint changed" containerID=600ed6ccf1 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3879 identity=60447 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q oldIdentity="no identity" subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Waiting for endpoint to be generated" containerID=600ed6ccf1 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3879 identity=60447 ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Rewrote endpoint BPF program" containerID=600ed6ccf1 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=3879 identity=60447 ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Successful endpoint creation" containerID=600ed6ccf1 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=3879 identity=60447 ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=daemon
time="2025-05-08T22:04:57Z" level=info msg="Delete endpoint request" containerID=600ed6ccf1 endpointID=3879 k8sNamespace=demo k8sPodName=nginx-deployment-86dcfdf4c6-pxz7q subsys=daemon
time="2025-05-08T22:04:57Z" level=info msg="Releasing key" key="[k8s:app=nginx k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=demo]" subsys=allocator
time="2025-05-08T22:04:57Z" level=info msg="Removed endpoint" containerID=600ed6ccf1 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=3879 identity=60447 ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-08T22:41:19Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=bb90ae07-0508-4036-baa8-5ffceb236efd subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=bb90ae07-0508-4036-baa8-5ffceb236efd policyRevision=52 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=1d294035-1980-4a5c-a452-0f9fb441c513 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1d294035-1980-4a5c-a452-0f9fb441c513 policyRevision=54 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=0145b270-6958-4089-a2be-589e5d1ce521 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=0145b270-6958-4089-a2be-589e5d1ce521 policyRevision=56 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=dfa764e4-17ec-4d42-89be-b82cf0718a6f subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=dfa764e4-17ec-4d42-89be-b82cf0718a6f policyRevision=58 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=8c26b573-d94a-4e43-ba20-29da1a02f5c4 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=8c26b573-d94a-4e43-ba20-29da1a02f5c4 policyRevision=60 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=dfa5e6b7-656f-430c-9650-c4bd08bf50d8 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=dfa5e6b7-656f-430c-9650-c4bd08bf50d8 policyRevision=62 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=7aa4dcf6-54cb-4818-8ace-20360a51b415 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=7aa4dcf6-54cb-4818-8ace-20360a51b415 policyRevision=64 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=e02dceca-c519-4af3-9e57-c1b6b85c23ca subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=e02dceca-c519-4af3-9e57-c1b6b85c23ca policyRevision=66 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=5763e644-7ed2-4216-a7a4-25020d63fb56 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=5763e644-7ed2-4216-a7a4-25020d63fb56 policyRevision=68 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=87deed5d-6352-46ee-860e-b5cf2630bbe2 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=87deed5d-6352-46ee-860e-b5cf2630bbe2 policyRevision=70 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=59969fbf-e65d-4a24-9de7-e88b92bf4f0f subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=59969fbf-e65d-4a24-9de7-e88b92bf4f0f policyRevision=72 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=19f848ef-640c-4902-97f9-feaf57e7fc12 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=19f848ef-640c-4902-97f9-feaf57e7fc12 policyRevision=74 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=d25069e3-e555-41e3-a0e2-2ca9cba5895a subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d25069e3-e555-41e3-a0e2-2ca9cba5895a policyRevision=76 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=388454d2-58f2-4e52-9a3c-af8c54f5efad subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=388454d2-58f2-4e52-9a3c-af8c54f5efad policyRevision=78 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-20T22:52:49Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T22:52:50Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T22:53:10Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=cm-acme-http-solver-h5gc9 obj="10.245.119.204:8089/ANY" subsys=k8s-watcher
time="2025-05-20T22:53:10Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=cm-acme-http-solver-d8z46 obj="10.245.21.144:8089/ANY" subsys=k8s-watcher
time="2025-05-20T23:26:21Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T23:26:22Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=d39d62b8-f716-4cc8-ac1f-a23217085392 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d39d62b8-f716-4cc8-ac1f-a23217085392 policyRevision=80 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=861a1e45-dd14-4122-bf1f-a6cfa97664e4 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=861a1e45-dd14-4122-bf1f-a6cfa97664e4 policyRevision=82 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=b1678a77-8b1e-4f31-8173-2ce2c9f953bb subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=b1678a77-8b1e-4f31-8173-2ce2c9f953bb policyRevision=84 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=dde50b09-1a02-4069-887e-ad25f66ef42f subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=dde50b09-1a02-4069-887e-ad25f66ef42f policyRevision=86 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=2e1293f3-727b-43d1-8a57-1f590a798b9f subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2e1293f3-727b-43d1-8a57-1f590a798b9f policyRevision=88 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=4c9c715a-2c91-4e21-a839-861ec97de0e9 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=4c9c715a-2c91-4e21-a839-861ec97de0e9 policyRevision=90 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=3496cede-bbe2-4cf2-8c14-e8d16efc2862 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=3496cede-bbe2-4cf2-8c14-e8d16efc2862 policyRevision=92 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-22T22:00:42Z" level=info msg="Delete endpoint request" containerID=f3e05d1c5d endpointID=1599 k8sNamespace=kube-system k8sPodName=coredns-854895db77-p9hbd subsys=daemon
time="2025-05-22T22:00:42Z" level=info msg="Releasing key" key="[k8s:doks.digitalocean.com/managed=true k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=coredns k8s:io.kubernetes.pod.namespace=kube-system k8s:k8s-app=kube-dns]" subsys=allocator
time="2025-05-22T22:00:42Z" level=info msg="Removed endpoint" containerID=f3e05d1c5d datapathPolicyRevision=92 desiredPolicyRevision=78 endpointID=1599 identity=30020 ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Create endpoint request" addressing="&{10.244.1.61 b2d8cc9e-4ea8-4894-bcbe-d3ab02c29ef6 default }" containerID=dc5e48ca7d2fb015d3448f72279677556ecfb046b18362a0bf13895b9fbbfacf datapathConfiguration="&{false false false false false <nil>}" interface=lxca532b63104ac k8sPodName=kube-system/coredns-6b79676d8-9569v labels="[]" subsys=daemon sync-build=true
time="2025-05-22T22:00:42Z" level=info msg="New endpoint" containerID=dc5e48ca7d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=356 ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Resolving identity labels (blocking)" containerID=dc5e48ca7d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=356 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Reusing existing global key" key="k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=coredns;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=kube-dns;" subsys=allocator
time="2025-05-22T22:00:42Z" level=info msg="Identity of endpoint changed" containerID=dc5e48ca7d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=356 identity=30020 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v oldIdentity="no identity" subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Waiting for endpoint to be generated" containerID=dc5e48ca7d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=356 identity=30020 ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Rewrote endpoint BPF program" containerID=dc5e48ca7d datapathPolicyRevision=0 desiredPolicyRevision=92 endpointID=356 identity=30020 ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Successful endpoint creation" containerID=dc5e48ca7d datapathPolicyRevision=92 desiredPolicyRevision=92 endpointID=356 identity=30020 ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=6320599f-8831-436f-a0af-aebca63ea3b0 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6320599f-8831-436f-a0af-aebca63ea3b0 policyRevision=94 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=c1227c40-d350-45b5-8c77-fb356c789259 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c1227c40-d350-45b5-8c77-fb356c789259 policyRevision=96 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=dca1dc14-f9b6-4bc1-bc4c-34129c93a40f subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=dca1dc14-f9b6-4bc1-bc4c-34129c93a40f policyRevision=98 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=0daf0a66-c6ed-4b34-88f2-7c164071686f subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=0daf0a66-c6ed-4b34-88f2-7c164071686f policyRevision=100 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=7e246bc7-a405-4c10-96d9-67ae25b8ffe9 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=7e246bc7-a405-4c10-96d9-67ae25b8ffe9 policyRevision=102 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=7d18237b-2412-49b3-885b-3fb9b36ba7ac subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=7d18237b-2412-49b3-885b-3fb9b36ba7ac policyRevision=104 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=36d121f0-259e-4fc0-9f97-639eeb619115 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=36d121f0-259e-4fc0-9f97-639eeb619115 policyRevision=106 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=d02a2590-a58a-4023-891d-f2035aaee1e6 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d02a2590-a58a-4023-891d-f2035aaee1e6 policyRevision=108 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=9de6c8b4-16e9-447e-a43e-5a8afe50bef9 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=9de6c8b4-16e9-447e-a43e-5a8afe50bef9 policyRevision=110 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=2a4d21ea-6c34-4f80-b83a-83e1a11f8ea9 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2a4d21ea-6c34-4f80-b83a-83e1a11f8ea9 policyRevision=112 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=c24f1493-1c87-4ec7-bd4f-36d5f6046c4e subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c24f1493-1c87-4ec7-bd4f-36d5f6046c4e policyRevision=114 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=0c046dc9-cdeb-4072-bd15-711c728c5413 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=0c046dc9-cdeb-4072-bd15-711c728c5413 policyRevision=116 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=ce2a9abe-b3e0-4fc8-9534-e0c627cc553f subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=ce2a9abe-b3e0-4fc8-9534-e0c627cc553f policyRevision=118 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=e35922a8-7564-43c0-bc00-1977e288b163 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=e35922a8-7564-43c0-bc00-1977e288b163 policyRevision=120 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:23Z" level=info msg="Memory available for map entries (0.003% of 4105375744B): 10263439B" subsys=config
time="2025-04-17T22:04:23Z" level=info msg="option bpf-ct-global-tcp-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:04:23Z" level=info msg="option bpf-ct-global-any-max set by dynamic sizing to 65536" subsys=config
time="2025-04-17T22:04:23Z" level=info msg="option bpf-nat-global-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:04:23Z" level=info msg="option bpf-neigh-global-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:04:23Z" level=info msg="option bpf-sock-rev-map-max set by dynamic sizing to 65536" subsys=config
time="2025-04-17T22:04:23Z" level=info msg=" --agent-health-port='9879'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --agent-labels=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --agent-liveness-update-interval='1s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --agent-not-ready-taint-key='node.cilium.io/agent-not-ready'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --allocator-list-timeout='3m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --allow-icmp-frag-needed='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --allow-localhost='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --annotate-k8s-node='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --api-rate-limit=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --arping-refresh-period='30s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --auto-create-cilium-node-resource='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --auto-direct-node-routes='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bgp-announce-lb-ip='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bgp-announce-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bgp-config-path='/var/lib/cilium/bgp/config.yaml'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-auth-map-max='524288'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-global-any-max='262144'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-global-tcp-max='524288'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-regular-any='1m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-regular-tcp='6h0m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-regular-tcp-fin='10s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-regular-tcp-syn='1m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-service-any='1m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-service-tcp='6h0m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-ct-timeout-service-tcp-grace='1m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-filter-priority='1'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-fragments-map-max='8192'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-acceleration='disabled'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-affinity-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-algorithm='random'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-dev-ip-addr-inherit=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-dsr-dispatch='opt'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-dsr-l4-xlate='frontend'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-external-clusterip='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-maglev-hash-seed='JLfvgnHc2kaSUFaI'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-maglev-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-maglev-table-size='16381'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-map-max='65536'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-mode='snat'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-proto-diff='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-rev-nat-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-rss-ipv4-src-cidr=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-rss-ipv6-src-cidr=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-service-backend-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-service-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-sock='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-sock-hostns-only='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-lb-source-range-map-max='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-map-dynamic-size-ratio='0.0025'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-map-event-buffers=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-nat-global-max='524288'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-neigh-global-max='524288'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-policy-map-full-reconciliation-interval='15m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-policy-map-max='16384'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-root='/sys/fs/bpf'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bpf-sock-rev-map-max='262144'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --bypass-ip-availability-upon-restore='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --certificates-directory='/var/run/cilium/certs'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cflags=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cgroup-root='/run/cilium/cgroupv2'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cilium-endpoint-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cluster-health-port='4240'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cluster-id='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cluster-name='default'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cluster-pool-ipv4-mask-size='25'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --clustermesh-config='/var/lib/cilium/clustermesh/'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --clustermesh-ip-identities-sync-timeout='1m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cmdref=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cni-chaining-mode='portmap'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cni-chaining-target=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cni-exclusive='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cni-external-routing='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --cni-log-file='/var/run/cilium/cilium-cni.log'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --config=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --config-dir='/tmp/cilium/config-map'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --config-sources='config-map:kube-system/cilium-config'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --conntrack-gc-interval='0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --conntrack-gc-max-interval='0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --container-ip-local-reserved-ports='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --crd-wait-timeout='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --custom-cni-conf='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --datapath-mode='veth'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --debug='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --debug-verbose=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --derive-masquerade-ip-addr-from-device=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --devices=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --direct-routing-device=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --disable-cnp-status-updates='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --disable-endpoint-crd='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --disable-envoy-version-check='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --disable-iptables-feeder-rules=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dns-max-ips-per-restored-rule='1000'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dns-policy-unload-on-shutdown='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-concurrency-limit='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-concurrency-processing-grace-period='0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-enable-transparent-mode='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-insecure-skip-transparent-mode-check='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-lock-count='128'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-lock-timeout='500ms'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --dnsproxy-socket-linger-timeout='10'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --egress-gateway-policy-map-max='16384'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --egress-gateway-reconciliation-trigger-interval='1s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --egress-masquerade-interfaces=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --egress-multi-home-ip-rule-compat='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-auto-protect-node-port-range='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bandwidth-manager='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bbr='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bgp-control-plane='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bpf-clock-probe='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bpf-masquerade='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-bpf-tproxy='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-cilium-api-server-access='*'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-cilium-endpoint-slice='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-cilium-health-api-server-access='*'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-custom-calls='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-endpoint-health-checking='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-endpoint-routes='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-envoy-config='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-external-ips='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-health-check-nodeport='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-health-checking='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-high-scale-ipcache='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-host-firewall='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-host-legacy-routing='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-host-port='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-hubble='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-hubble-recorder-api='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-icmp-rules='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-identity-mark='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ip-masq-agent='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipsec='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipsec-key-watcher='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipsec-xfrm-state-caching='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv4='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv4-big-tcp='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv4-egress-gateway='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv4-fragment-tracking='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv4-masquerade='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv6='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv6-big-tcp='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv6-masquerade='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-ipv6-ndp='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s-api-discovery='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s-endpoint-slice='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s-event-handover='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s-networkpolicy='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-k8s-terminating-endpoint='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-l2-announcements='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-l2-neigh-discovery='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-l2-pod-announcements='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-l7-proxy='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-local-node-route='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-local-redirect-policy='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-mke='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-monitor='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-nat46x64-gateway='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-node-port='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-pmtu-discovery='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-policy='default'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-recorder='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-remote-node-identity='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-runtime-device-detection='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-sctp='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-service-topology='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-session-affinity='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-srv6='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-stale-cilium-endpoint-cleanup='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-svc-source-range-check='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-tracing='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-unreachable-routes='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-vtep='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-well-known-identities='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-wireguard='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-wireguard-userspace-fallback='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-xdp-prefilter='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --enable-xt-socket-fallback='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --encrypt-interface=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --encrypt-node='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --endpoint-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --endpoint-queue-size='25'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --endpoint-status=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --envoy-config-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --envoy-log=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --exclude-local-address=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --external-envoy-proxy='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --fixed-identity-mapping=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --fqdn-regex-compile-lru-size='1024'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --gops-port='9890'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-403-msg=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-idle-timeout='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-max-grpc-timeout='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-normalize-path='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-request-timeout='3600'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-retry-count='3'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --http-retry-timeout='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-disable-tls='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-event-buffer-capacity='4095'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-event-queue-size='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-export-file-compress='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-export-file-max-backups='5'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-export-file-max-size-mb='10'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-export-file-path=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-listen-address=':4244'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-metrics=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-metrics-server=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-monitor-events=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-prefer-ipv6='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-recorder-sink-queue-size='1024'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-recorder-storage-path='/var/run/cilium/pcaps'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-skip-unknown-cgroup-ids='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-socket-path='/var/run/cilium/hubble.sock'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-tls-cert-file='/var/lib/cilium/tls/hubble/server.crt'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-tls-client-ca-files='/var/lib/cilium/tls/hubble/client-ca.crt'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --hubble-tls-key-file='/var/lib/cilium/tls/hubble/server.key'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --identity-allocation-mode='crd'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --identity-change-grace-period='5s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --identity-gc-interval='5m'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --identity-heartbeat-timeout='15m'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --identity-restore-grace-period='10m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --install-egress-gateway-routes='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --install-iptables-rules='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --install-no-conntrack-iptables-rules='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ip-allocation-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ip-masq-agent-config-path='/etc/config/ip-masq-agent'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipam='cluster-pool'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipam-cilium-node-update-rate='15s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipam-multi-pool-pre-allocation='default=8'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipsec-key-file=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipsec-key-rotation-duration='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --iptables-lock-timeout='5s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --iptables-random-fully='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-native-routing-cidr='10.244.0.0/16'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-node='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-pod-subnets=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-range='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-service-loopback-address='169.254.42.1'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv4-service-range='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-cluster-alloc-cidr='f00d::/64'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-mcast-device=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-native-routing-cidr=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-node='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-pod-subnets=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-range='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --ipv6-service-range='auto'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --join-cluster='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-api-server='https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-client-burst='10'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-client-qps='5'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-heartbeat-timeout='30s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-kubeconfig-path=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-namespace='kube-system'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-require-ipv4-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-require-ipv6-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-service-cache-size='128'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-service-proxy-name=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-sync-timeout='3m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --k8s-watcher-endpoint-selector='metadata.name!=kube-scheduler,metadata.name!=kube-controller-manager,metadata.name!=etcd-operator,metadata.name!=gcp-controller-manager'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --keep-config='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kube-proxy-replacement='partial'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kube-proxy-replacement-healthz-bind-address=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore-connectivity-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore-lease-ttl='15m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore-max-consecutive-quorum-errors='2'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore-opt=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --kvstore-periodic-sync='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --l2-announcements-lease-duration='15s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --l2-announcements-renew-deadline='5s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --l2-announcements-retry-period='2s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --l2-pod-announcements-interface=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --label-prefix-file=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --labels=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --lib-dir='/var/lib/cilium'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --local-max-addr-scope='252'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --local-router-ipv4=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --local-router-ipv6=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --log-driver=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --log-opt=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --log-system-load='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --max-controller-interval='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-enabled='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-mutual-listener-port='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-queue-size='1024'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-rotated-identities-queue-size='1024'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-signal-backoff-duration='1s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-spiffe-trust-domain='spiffe.cilium'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mesh-auth-spire-admin-socket=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --metrics='+cilium_bpf_map_pressure'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mke-cgroup-mount=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --monitor-aggregation='medium'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --monitor-aggregation-flags='all'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --monitor-aggregation-interval='5s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --monitor-queue-size='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --mtu='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-encryption-opt-out-labels='node-role.kubernetes.io/control-plane'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-port-acceleration='disabled'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-port-algorithm='random'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-port-bind-protection='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-port-mode='snat'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --node-port-range='30000,32767'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --nodes-gc-interval='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --operator-api-serve-addr='127.0.0.1:9234'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --policy-audit-mode='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --policy-queue-size='100'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --policy-trigger-interval='1s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --pprof='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --pprof-address='localhost'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --pprof-port='6060'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --preallocate-bpf-maps='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --prepend-iptables-chains='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --procfs='/host/proc'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --prometheus-serve-addr=':9090'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-connect-timeout='2'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-gid='1337'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-idle-timeout-seconds='60'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-max-connection-duration-seconds='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-max-requests-per-connection='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-prometheus-port='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-xff-num-trusted-hops-egress='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --proxy-xff-num-trusted-hops-ingress='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --read-cni-conf=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --remove-cilium-node-taints='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --restore='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --restored-proxy-ports-age-limit='15'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --route-metric='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --routing-mode='native'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --set-cilium-is-up-condition='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --set-cilium-node-taints='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --sidecar-istio-proxy-image='cilium/istio_proxy'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --single-cluster-route='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --skip-cnp-status-startup-clean='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --socket-path='/var/run/cilium/cilium.sock'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --srv6-encap-mode='reduced'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --state-dir='/var/run/cilium'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --synchronize-k8s-nodes='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-dns-reject-response-code='refused'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-enable-dns-compression='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-endpoint-max-ip-per-hostname='50'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-idle-connection-grace-period='0s'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-max-deferred-connection-deletes='10000'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-min-ttl='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-pre-cache=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-proxy-port='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tofqdns-proxy-response-max-delay='100ms'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --trace-payloadlen='128'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --trace-sock='true'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tunnel=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tunnel-port='0'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --tunnel-protocol='vxlan'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --unmanaged-pod-watcher-interval='15'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --use-cilium-internal-ip-for-ipsec='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --version='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --vlan-bpf-bypass=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --vtep-cidr=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --vtep-endpoint=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --vtep-mac=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --vtep-mask=''" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --wireguard-encapsulate='false'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" --write-cni-conf-when-ready='/host/etc/cni/net.d/05-cilium.conflist'" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" _ _ _" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg=" ___|_| |_|_ _ _____" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg="| _| | | | | | |" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg="|___|_|_|_|___|_|_|_|" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg="Cilium 1.14.18 5418622a22 2024-07-03T11:57:56+02:00 go version go1.22.10 linux/amd64" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg="clang (10.0.0) and kernel (6.1.0) versions: OK!" subsys=linux-datapath
time="2025-04-17T22:04:23Z" level=info msg="linking environment: OK!" subsys=linux-datapath
time="2025-04-17T22:04:23Z" level=info msg="Kernel config file not found: if the agent fails to start, check the system requirements at https://docs.cilium.io/en/stable/operations/system_requirements" subsys=probes
time="2025-04-17T22:04:23Z" level=info msg="Detected mounted BPF filesystem at /sys/fs/bpf" subsys=bpf
time="2025-04-17T22:04:23Z" level=info msg="Mounted cgroupv2 filesystem at /run/cilium/cgroupv2" subsys=cgroups
time="2025-04-17T22:04:23Z" level=info msg="Parsing base label prefixes from default label list" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg="Parsing additional label prefixes from user inputs: []" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg="Final label prefixes to be used for identity evaluation:" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - reserved:.*" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - :io\\.kubernetes\\.pod\\.namespace" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - :io\\.cilium\\.k8s\\.namespace\\.labels" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - :app\\.kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:io\\.kubernetes" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:.*beta\\.kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:k8s\\.io" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:pod-template-generation" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:pod-template-hash" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:controller-revision-hash" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:annotation.*" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=" - !:etcd_node" subsys=labels-filter
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration=1.427766ms function="pprof.init.func1 (cell.go:50)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="79.663µs" function="gops.registerGopsHooks (cell.go:38)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration=3.316235ms function="metrics.init.func1 (cell.go:11)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="6.643µs" function="metrics.init.func2 (cell.go:14)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Spire Delegate API Client is disabled as no socket path is configured" subsys=spire-delegate
time="2025-04-17T22:04:23Z" level=info msg="Mutual authentication handler is disabled as no port is configured" subsys=auth
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration=102.885505ms function="cmd.init.func3 (daemon_main.go:1638)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="45.592µs" function="bgpv1.init.func1 (cell.go:46)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="225.691µs" function="metrics.RegisterCollector (metrics.go:56)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="17.743µs" function="gc.registerSignalHandler (cell.go:47)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="22.921µs" function="utime.initUtimeSync (cell.go:29)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="179.62µs" function="agentliveness.newAgentLivenessUpdater (agent_liveness.go:43)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="81.921µs" function="l2responder.NewL2ResponderReconciler (l2responder.go:63)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Invoked duration="96.452µs" function="garp.newGARPProcessor (processor.go:27)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg=Starting subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Started gops server" address="127.0.0.1:9890" subsys=gops
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="580.162µs" function="gops.registerGopsHooks.func1 (cell.go:43)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="2.241µs" function="metrics.NewRegistry.func1 (registry.go:86)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Establishing connection to apiserver" host="https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com" subsys=k8s-client
time="2025-04-17T22:04:23Z" level=info msg="Serving prometheus metrics on :9090" subsys=metrics
time="2025-04-17T22:04:23Z" level=info msg="Connected to apiserver" subsys=k8s-client
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration=14.572699ms function="client.(*compositeClientset).onStart" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration=3.989335ms function="authmap.newAuthMap.func1 (cell.go:27)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="95.315µs" function="configmap.newMap.func1 (cell.go:23)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="64.329µs" function="signalmap.newMap.func1 (cell.go:44)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="154.459µs" function="nodemap.newNodeMap.func1 (cell.go:23)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="141.941µs" function="eventsmap.newEventsMap.func1 (cell.go:35)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="77.192µs" function="*cni.cniConfigManager.Start" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Generating CNI configuration file with mode portmap" subsys=cni-config
time="2025-04-17T22:04:23Z" level=info msg="Wrote CNI configuration file to /host/etc/cni/net.d/05-cilium.conflist" subsys=cni-config
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration=20.80947ms function="datapath.newDatapath.func1 (cells.go:113)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Restored 0 node IDs from the BPF map" subsys=linux-datapath
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="116.525µs" function="datapath.newDatapath.func2 (cells.go:126)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="25.986µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Node].Start" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="1.406µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumNode].Start" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Using autogenerated IPv4 allocation range" subsys=node v4Prefix=10.24.0.0/16
time="2025-04-17T22:04:23Z" level=info msg="no local ciliumnode found, will not restore cilium internal ips from k8s" subsys=daemon
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration=103.976855ms function="node.NewLocalNodeStore.func1 (local_node_store.go:76)" subsys=hive
time="2025-04-17T22:04:23Z" level=info msg="Start hook executed" duration="10.944µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Service].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration=201.077354ms function="*manager.diffStore[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Service].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="5.827µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s.Endpoints].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Using discoveryv1.EndpointSlice" subsys=k8s
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration=100.341912ms function="*manager.diffStore[*github.com/cilium/cilium/pkg/k8s.Endpoints].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="7.664µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Pod].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="1.251µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Namespace].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="1.029µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumNetworkPolicy].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="1.374µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumClusterwideNetworkPolicy].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="2.234µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2alpha1.CiliumCIDRGroup].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="26.98µs" function="endpointmanager.newDefaultEndpointManager.func1 (cell.go:201)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="11.042µs" function="cmd.newPolicyTrifecta.func1 (policy.go:135)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="108.577µs" function="*manager.manager.Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Serving cilium node monitor v1.2 API at unix:///var/run/cilium/monitor1_2.sock" subsys=monitor-agent
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="277.892µs" function="agent.newMonitorAgent.func1 (cell.go:61)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="2.176µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2alpha1.CiliumL2AnnouncementPolicy].Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="6.635µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Resoring proxy ports from file failed, falling back to restoring from iptables rules" error="stat /var/run/cilium/state/proxy_ports_state.json: no such file or directory" file-path=/var/run/cilium/state/proxy_ports_state.json subsys=proxy
time="2025-04-17T22:04:24Z" level=info msg="Envoy: Starting xDS gRPC server listening on /var/run/cilium/envoy/sockets/xds.sock" subsys=envoy-manager
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration=2.175944ms function="proxy.newProxy.func1 (cell.go:63)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="186.337µs" function="signal.provideSignalManager.func1 (cell.go:25)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Datapath signal listener running" subsys=signal
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration=1.242006ms function="auth.registerAuthManager.func1 (cell.go:109)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="27.245µs" function="auth.registerGCJobs.func1 (cell.go:158)" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Start hook executed" duration="10.722µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:04:24Z" level=info msg="Using Managed Neighbor Kernel support" subsys=daemon
time="2025-04-17T22:04:24Z" level=warning msg="Deprecated value for --kube-proxy-replacement: partial (use either \"true\", or \"false\")" subsys=daemon
time="2025-04-17T22:04:24Z" level=info msg="Inheriting MTU from external network interface" device=eth1 ipAddr=10.108.0.3 mtu=1500 subsys=mtu
time="2025-04-17T22:04:24Z" level=info msg="Local boot ID is \"80a520c6-e29e-4fba-b83f-e22eb5be2068\"" subsys=node
time="2025-04-17T22:04:24Z" level=info msg="Removed map pin at /sys/fs/bpf/tc/globals/cilium_ipcache, recreating and re-pinning map cilium_ipcache" file-path=/sys/fs/bpf/tc/globals/cilium_ipcache name=cilium_ipcache subsys=bpf
time="2025-04-17T22:04:24Z" level=info msg="Restored services from maps" failedServices=0 restoredServices=0 subsys=service
time="2025-04-17T22:04:24Z" level=info msg="Restored backends from maps" failedBackends=0 restoredBackends=0 skippedBackends=0 subsys=service
time="2025-04-17T22:04:24Z" level=info msg="Reading old endpoints..." subsys=daemon
time="2025-04-17T22:04:24Z" level=info msg="No old endpoints found." subsys=daemon
time="2025-04-17T22:04:24Z" level=info msg="Waiting until all Cilium CRDs are available" subsys=k8s
time="2025-04-17T22:04:25Z" level=info msg="All Cilium CRDs have been found and are available" subsys=k8s
time="2025-04-17T22:04:25Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-65529 subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-65529 subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=warning msg="Unable to get node resource" error="ciliumnodes.cilium.io \"system-0-65529\" not found" subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=warning msg="Unable to get node resource" error="ciliumnodes.cilium.io \"system-0-65529\" not found" subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=info msg="Successfully created CiliumNode resource" subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=warning msg="Unable to create CiliumNode resource, will retry" error="ciliumnodes.cilium.io \"system-0-65529\" already exists" subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=info msg="Retrieved node information from cilium node" nodeName=system-0-65529 subsys=k8s
time="2025-04-17T22:04:25Z" level=info msg="Received own node information from API server" ipAddr.ipv4=10.108.0.3 ipAddr.ipv6="<nil>" k8sNodeIP=10.108.0.3 labels="map[beta.kubernetes.io/arch:amd64 beta.kubernetes.io/instance-type:s-2vcpu-4gb beta.kubernetes.io/os:linux doks.digitalocean.com/managed:true doks.digitalocean.com/node-id:b67bdbe9-bd78-4fe6-b6cf-afe9bb225ad0 doks.digitalocean.com/node-pool:system-0 doks.digitalocean.com/node-pool-id:73347348-171d-4091-a3c9-1d4b0945c964 doks.digitalocean.com/version:1.30.10-do.0 failure-domain.beta.kubernetes.io/region:nyc3 kubernetes.io/arch:amd64 kubernetes.io/hostname:system-0-65529 kubernetes.io/os:linux node.kubernetes.io/instance-type:s-2vcpu-4gb region:nyc3 topology.kubernetes.io/region:nyc3]" nodeName=system-0-65529 subsys=k8s v4Prefix=10.244.1.0/25 v6Prefix="<nil>"
time="2025-04-17T22:04:25Z" level=info msg="k8s mode: Allowing localhost to reach local endpoints" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Detected devices" devices="[]" subsys=linux-datapath
time="2025-04-17T22:04:25Z" level=info msg="Enabling k8s event listener" subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Removing stale endpoint interfaces" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Waiting until local node addressing before starting watchers depending on it" subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Skipping kvstore configuration" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Initializing node addressing" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Initializing cluster-pool IPAM" subsys=ipam v4Prefix=10.244.1.0/25 v6Prefix="<nil>"
time="2025-04-17T22:04:25Z" level=info msg="Restoring endpoints..." subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Endpoints restored" failed=0 restored=0 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Addressing information:" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Cluster-Name: default" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Cluster-ID: 0" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Local node-name: system-0-65529" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Node-IPv6: <nil>" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" External-Node IPv4: 10.108.0.3" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Internal-Node IPv4: 10.244.1.72" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" IPv4 allocation prefix: 10.244.1.0/25" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" IPv4 native routing prefix: 10.244.0.0/16" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Loopback IPv4: 169.254.42.1" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" Local IPv4 addresses:" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" - 164.90.136.24" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" - 10.17.0.5" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" - 10.108.0.3" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg=" - 164.90.136.24" subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Node updated" clusterName=default nodeName=system-0-65529 subsys=nodemanager
time="2025-04-17T22:04:25Z" level=info msg="Adding local node to cluster" node="{system-0-65529 default [{ExternalIP 164.90.136.24} {InternalIP 10.108.0.3} {CiliumInternalIP 10.244.1.72} {ExternalIP 164.90.136.24}] 10.244.1.0/25 [] <nil> [] 10.244.1.44 <nil> <nil> <nil> 0 local 0 map[beta.kubernetes.io/arch:amd64 beta.kubernetes.io/instance-type:s-2vcpu-4gb beta.kubernetes.io/os:linux doks.digitalocean.com/managed:true doks.digitalocean.com/node-id:b67bdbe9-bd78-4fe6-b6cf-afe9bb225ad0 doks.digitalocean.com/node-pool:system-0 doks.digitalocean.com/node-pool-id:73347348-171d-4091-a3c9-1d4b0945c964 doks.digitalocean.com/version:1.30.10-do.0 failure-domain.beta.kubernetes.io/region:nyc3 kubernetes.io/arch:amd64 kubernetes.io/hostname:system-0-65529 kubernetes.io/os:linux node.kubernetes.io/instance-type:s-2vcpu-4gb region:nyc3 topology.kubernetes.io/region:nyc3] map[] 1 80a520c6-e29e-4fba-b83f-e22eb5be2068}" subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=info msg="Node updated" clusterName=default nodeName=system-0-6tk3b subsys=nodemanager
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=2059c76d-348b-4c2b-a795-55f75c26b77c subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2059c76d-348b-4c2b-a795-55f75c26b77c policyRevision=2 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=c8b2e396-2466-40f7-b27e-e6cfc7fcf578 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c8b2e396-2466-40f7-b27e-e6cfc7fcf578 policyRevision=3 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=c3537988-b97d-4ec4-87ed-ea0a7d07dc11 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c3537988-b97d-4ec4-87ed-ea0a7d07dc11 policyRevision=4 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=5132c0a1-3303-464e-b45a-6eb3e1dde804 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=5132c0a1-3303-464e-b45a-6eb3e1dde804 policyRevision=5 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=536870ae-268e-4478-a339-778448ec116d subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=536870ae-268e-4478-a339-778448ec116d policyRevision=6 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=533deec1-93e5-491b-bb28-cf3d8e70c612 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=533deec1-93e5-491b-bb28-cf3d8e70c612 policyRevision=7 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=1520cbd5-1199-4059-8766-9cdb0fcece84 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1520cbd5-1199-4059-8766-9cdb0fcece84 policyRevision=8 subsys=daemon
time="2025-04-17T22:04:25Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-65529 subsys=nodediscovery
time="2025-04-17T22:04:25Z" level=info msg="Waiting until all pre-existing resources have been received" subsys=k8s-watcher
time="2025-04-17T22:04:25Z" level=info msg="Node updated" clusterName=default nodeName=system-0-6tk3r subsys=nodemanager
time="2025-04-17T22:04:25Z" level=info msg="Annotating k8s node" subsys=daemon v4CiliumHostIP.IPv4=10.244.1.72 v4IngressIP.IPv4="<nil>" v4Prefix=10.244.1.0/25 v4healthIP.IPv4=10.244.1.44 v6CiliumHostIP.IPv6="<nil>" v6IngressIP.IPv6="<nil>" v6Prefix="<nil>" v6healthIP.IPv6="<nil>"
time="2025-04-17T22:04:25Z" level=info msg="Initializing identity allocator" subsys=identity-cache
time="2025-04-17T22:04:25Z" level=info msg="Allocating identities between range" cluster-id=0 max=65535 min=256 subsys=identity-cache
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.forwarding sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.rp_filter sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.accept_local sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.send_redirects sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.forwarding sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.rp_filter sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.accept_local sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.send_redirects sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.core.bpf_jit_enable sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.all.rp_filter sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.fib_multipath_use_neigh sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=kernel.unprivileged_bpf_disabled sysParamValue=1
time="2025-04-17T22:04:25Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=kernel.timer_migration sysParamValue=0
time="2025-04-17T22:04:25Z" level=info msg="Setting up BPF datapath" bpfClockSource=ktime bpfInsnSet="<nil>" subsys=datapath-loader
time="2025-04-17T22:04:26Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-17T22:04:26Z" level=info msg="Iptables rules installed" subsys=iptables
time="2025-04-17T22:04:26Z" level=info msg="Adding new proxy port rules for cilium-dns-egress:40061" id=cilium-dns-egress subsys=proxy
time="2025-04-17T22:04:26Z" level=info msg="Iptables proxy rules installed" subsys=iptables
time="2025-04-17T22:04:26Z" level=info msg="Start hook executed" duration=2.222971128s function="cmd.newDaemonPromise.func1 (daemon_main.go:1694)" subsys=hive
time="2025-04-17T22:04:26Z" level=info msg="Start hook executed" duration="44.35µs" function="utime.initUtimeSync.func1 (cell.go:33)" subsys=hive
time="2025-04-17T22:04:26Z" level=info msg="Start hook executed" duration="7.549µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:04:26Z" level=info msg="Starting IP identity watcher" subsys=ipcache
time="2025-04-17T22:04:26Z" level=info msg="Initializing daemon" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Validating configured node address ranges" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Starting connection tracking garbage collector" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Initial scan of connection tracking completed" subsys=ct-gc
time="2025-04-17T22:04:26Z" level=info msg="Regenerating restored endpoints" numRestored=0 subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Creating host endpoint" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="New endpoint" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1715 ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Resolving identity labels (blocking)" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1715 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:doks.digitalocean.com/node-id=b67bdbe9-bd78-4fe6-b6cf-afe9bb225ad0,k8s:doks.digitalocean.com/node-pool-id=73347348-171d-4091-a3c9-1d4b0945c964,k8s:doks.digitalocean.com/node-pool=system-0,k8s:doks.digitalocean.com/version=1.30.10-do.0,k8s:node.kubernetes.io/instance-type=s-2vcpu-4gb,k8s:region=nyc3,k8s:topology.kubernetes.io/region=nyc3,reserved:host" ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Identity of endpoint changed" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1715 identity=1 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:doks.digitalocean.com/node-id=b67bdbe9-bd78-4fe6-b6cf-afe9bb225ad0,k8s:doks.digitalocean.com/node-pool-id=73347348-171d-4091-a3c9-1d4b0945c964,k8s:doks.digitalocean.com/node-pool=system-0,k8s:doks.digitalocean.com/version=1.30.10-do.0,k8s:node.kubernetes.io/instance-type=s-2vcpu-4gb,k8s:region=nyc3,k8s:topology.kubernetes.io/region=nyc3,reserved:host" ipv4= ipv6= k8sPodName=/ oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Launching Cilium health daemon" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Launching Cilium health endpoint" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Started healthz status API server" address="127.0.0.1:9879" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Processing queued endpoint deletion requests from /var/run/cilium/deleteQueue" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="processing 0 queued deletion requests" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Initializing Cilium API" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Start hook executed" duration="173.706µs" function="l2respondermap.newMap.func1 (l2_responder_map4.go:44)" subsys=hive
time="2025-04-17T22:04:26Z" level=info msg="Start hook executed" duration="6.971µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:04:26Z" level=info msg="Finished regenerating restored endpoints" regenerated=0 subsys=daemon total=0
time="2025-04-17T22:04:26Z" level=info msg="Deleted orphan backends" orphanBackends=0 subsys=service
time="2025-04-17T22:04:26Z" level=info msg="Removed stale bpf map" file-path=/sys/fs/bpf/tc/globals/cilium_lb4_source_range subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Cleaning up Cilium health endpoint" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Daemon initialization completed" bootstrapTime=3.32915262s subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Configuring Hubble server" eventQueueSize=2048 maxFlows=4095 subsys=hubble
time="2025-04-17T22:04:26Z" level=info msg="Starting local Hubble server" address="unix:///var/run/cilium/hubble.sock" subsys=hubble
time="2025-04-17T22:04:26Z" level=info msg="Beginning to read perf buffer" startTime="2025-04-17 22:04:26.488904612 +0000 UTC m=+3.389865571" subsys=monitor-agent
time="2025-04-17T22:04:26Z" level=info msg="Starting Hubble server" address=":4244" subsys=hubble
time="2025-04-17T22:04:26Z" level=info msg="Serving cilium API at unix:///var/run/cilium/cilium.sock" subsys=daemon
time="2025-04-17T22:04:26Z" level=info msg="Create endpoint request" addressing="&{10.244.1.28 51e14210-aae1-4e0b-85b3-8db00ab678ed default }" containerID=e5ccc336169ffbc10803b8ffd55d2c82edf18dfd761be28cb26489b2ff8e2d9e datapathConfiguration="&{false false false false false <nil>}" interface=lxc44b50984441e k8sPodName=kube-system/konnectivity-agent-qrsjc labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:26Z" level=info msg="New endpoint" containerID=e5ccc33616 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=541 ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Resolving identity labels (blocking)" containerID=e5ccc33616 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=541 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=konnectivity-agent" ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Reusing existing global key" key="k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=konnectivity-agent;" subsys=allocator
time="2025-04-17T22:04:26Z" level=info msg="Identity of endpoint changed" containerID=e5ccc33616 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=541 identity=32430 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=konnectivity-agent" ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Waiting for endpoint to be generated" containerID=e5ccc33616 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=541 identity=32430 ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc subsys=endpoint
time="2025-04-17T22:04:26Z" level=info msg="Compiled new BPF template" BPFCompilationTime=448.864037ms file-path=/var/run/cilium/state/templates/38947ca1b98bdc798c90e04502afdc182aef39dd080a3b23edb6bffd04e607ac/bpf_host.o subsys=datapath-loader
time="2025-04-17T22:04:26Z" level=info msg="Rewrote endpoint BPF program" containerID= datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=1715 identity=1 ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:27Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-17T22:04:27Z" level=info msg="New endpoint" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=162 ipv4=10.244.1.44 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:27Z" level=info msg="Resolving identity labels (blocking)" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=162 identityLabels="reserved:health" ipv4=10.244.1.44 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:27Z" level=info msg="Identity of endpoint changed" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=162 identity=4 identityLabels="reserved:health" ipv4=10.244.1.44 ipv6= k8sPodName=/ oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:28Z" level=info msg="Compiled new BPF template" BPFCompilationTime=1.599029024s file-path=/var/run/cilium/state/templates/756338fe133d4c92642f65cd7b1cfff69481ecf0ee39d710b520c903ca69d266/bpf_lxc.o subsys=datapath-loader
time="2025-04-17T22:04:28Z" level=info msg="Rewrote endpoint BPF program" containerID= datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=162 identity=4 ipv4=10.244.1.44 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:04:28Z" level=info msg="Rewrote endpoint BPF program" containerID=e5ccc33616 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=541 identity=32430 ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc subsys=endpoint
time="2025-04-17T22:04:28Z" level=info msg="Successful endpoint creation" containerID=e5ccc33616 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=541 identity=32430 ipv4=10.244.1.28 ipv6= k8sPodName=kube-system/konnectivity-agent-qrsjc subsys=daemon
time="2025-04-17T22:04:28Z" level=info msg="Serving cilium health API at unix:///var/run/cilium/health.sock" subsys=health-server
time="2025-04-17T22:04:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.42 237daac6-3b67-41fb-978e-b0d6b66588b3 default }" containerID=0a8850748733ff6c28587ecdf65266d4741a301b49375a62bf9c7435239be287 datapathConfiguration="&{false false false false false <nil>}" interface=lxc42d2980c1128 k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:40Z" level=info msg="New endpoint" containerID=0a88507487 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3729 ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Resolving identity labels (blocking)" containerID=0a88507487 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3729 identityLabels="k8s:app.kubernetes.io/name=argocd-notifications-controller,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-notifications-controller,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-notifications-controller;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-notifications-controller;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:40Z" level=info msg="Identity of endpoint changed" containerID=0a88507487 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3729 identity=23481 identityLabels="k8s:app.kubernetes.io/name=argocd-notifications-controller,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-notifications-controller,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Waiting for endpoint to be generated" containerID=0a88507487 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3729 identity=23481 ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.55 9301b5a0-d149-46a6-af14-78430687825d default }" containerID=95d7216a41490e02f87fb2e992471ce0cf75b78dc85e380fd90171f4bc9bf6d0 datapathConfiguration="&{false false false false false <nil>}" interface=lxc2366a1a5d536 k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:40Z" level=info msg="New endpoint" containerID=95d7216a41 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1525 ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Resolving identity labels (blocking)" containerID=95d7216a41 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1525 identityLabels="k8s:app.kubernetes.io/name=argocd-dex-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-dex-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-dex-server;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-dex-server;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:40Z" level=info msg="Identity of endpoint changed" containerID=95d7216a41 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1525 identity=63450 identityLabels="k8s:app.kubernetes.io/name=argocd-dex-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-dex-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Waiting for endpoint to be generated" containerID=95d7216a41 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1525 identity=63450 ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.82 2669e1f1-246e-42e3-9135-c44d5a4f97b9 default }" containerID=dc5a0589c22c957a7550e064268f256ef73f8cf3f3f7f153257d553d528e10ba datapathConfiguration="&{false false false false false <nil>}" interface=lxcfc40e706c1f1 k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:40Z" level=info msg="New endpoint" containerID=dc5a0589c2 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4061 ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Resolving identity labels (blocking)" containerID=dc5a0589c2 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4061 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/instance=kubeintel;k8s:app.kubernetes.io/name=kubeintel;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=kubeintel;k8s:io.kubernetes.pod.namespace=kubeintel;" subsys=allocator
time="2025-04-17T22:04:40Z" level=info msg="Identity of endpoint changed" containerID=dc5a0589c2 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4061 identity=23569 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Waiting for endpoint to be generated" containerID=dc5a0589c2 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4061 identity=23569 ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Rewrote endpoint BPF program" containerID=0a88507487 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=3729 identity=23481 ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Successful endpoint creation" containerID=0a88507487 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=3729 identity=23481 ipv4=10.244.1.42 ipv6= k8sPodName=argocd/argocd-notifications-controller-944c45fb-bqtql subsys=daemon
time="2025-04-17T22:04:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.70 a75f583a-6ecc-4514-bd1a-f3cb9811aa59 default }" containerID=171e582722ff78cbb3e2803d9c64f86db914531dd41a8674d807a8730ff2add7 datapathConfiguration="&{false false false false false <nil>}" interface=lxcddc148ee97ea k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:40Z" level=info msg="New endpoint" containerID=171e582722 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2840 ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Resolving identity labels (blocking)" containerID=171e582722 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2840 identityLabels="k8s:app.kubernetes.io/name=argocd-applicationset-controller,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-applicationset-controller,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-applicationset-controller;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-applicationset-controller;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:40Z" level=info msg="Identity of endpoint changed" containerID=171e582722 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2840 identity=57071 identityLabels="k8s:app.kubernetes.io/name=argocd-applicationset-controller,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-applicationset-controller,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Waiting for endpoint to be generated" containerID=171e582722 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2840 identity=57071 ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Rewrote endpoint BPF program" containerID=dc5a0589c2 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=4061 identity=23569 ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Successful endpoint creation" containerID=dc5a0589c2 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=4061 identity=23569 ipv4=10.244.1.82 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-z7lt2 subsys=daemon
time="2025-04-17T22:04:40Z" level=info msg="Rewrote endpoint BPF program" containerID=95d7216a41 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=1525 identity=63450 ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.87 b376e6a5-2bc5-4707-a20f-45f607f52a0d default }" containerID=ecf5a88435a9208312b1bbf92356372420f9d0a9364a68f921674f8d1da6c206 datapathConfiguration="&{false false false false false <nil>}" interface=lxc52472155addf k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:40Z" level=info msg="New endpoint" containerID=ecf5a88435 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4089 ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Successful endpoint creation" containerID=95d7216a41 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=1525 identity=63450 ipv4=10.244.1.55 ipv6= k8sPodName=argocd/argocd-dex-server-6c588b4db8-h5pl8 subsys=daemon
time="2025-04-17T22:04:40Z" level=info msg="Resolving identity labels (blocking)" containerID=ecf5a88435 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4089 identityLabels="k8s:app.kubernetes.io/name=argocd-redis,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-redis,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-redis;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-redis;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:40Z" level=info msg="Identity of endpoint changed" containerID=ecf5a88435 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4089 identity=3754 identityLabels="k8s:app.kubernetes.io/name=argocd-redis,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-redis,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:40Z" level=info msg="Waiting for endpoint to be generated" containerID=ecf5a88435 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=4089 identity=3754 ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=171e582722 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=2840 identity=57071 ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=171e582722 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=2840 identity=57071 ipv4=10.244.1.70 ipv6= k8sPodName=argocd/argocd-applicationset-controller-5f7d6d48fb-55fbq subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=ecf5a88435 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=4089 identity=3754 ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=ecf5a88435 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=4089 identity=3754 ipv4=10.244.1.87 ipv6= k8sPodName=argocd/argocd-redis-78d79b866f-7hdk5 subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.123 643c8582-352f-4555-85d1-3c520372aa40 default }" containerID=f3e05d1c5d95cce83c74785bb93c8dfb6e46d9960f716a195793daffe129bd4c datapathConfiguration="&{false false false false false <nil>}" interface=lxcf06a3fb206e6 k8sPodName=kube-system/coredns-854895db77-p9hbd labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:41Z" level=info msg="New endpoint" containerID=f3e05d1c5d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1599 ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Resolving identity labels (blocking)" containerID=f3e05d1c5d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1599 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Reusing existing global key" key="k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=coredns;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=kube-dns;" subsys=allocator
time="2025-04-17T22:04:41Z" level=info msg="Identity of endpoint changed" containerID=f3e05d1c5d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1599 identity=30020 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Waiting for endpoint to be generated" containerID=f3e05d1c5d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1599 identity=30020 ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.12 75d8c3dd-78e6-47fc-b485-fce5f2057cd2 default }" containerID=cbc1505e67f1c58a570cbf0c3266fca29a6e7b3edc9184a2316e3111e2c76d77 datapathConfiguration="&{false false false false false <nil>}" interface=lxc50c1d4ac5f15 k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:41Z" level=info msg="New endpoint" containerID=cbc1505e67 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3225 ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Resolving identity labels (blocking)" containerID=cbc1505e67 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3225 identityLabels="k8s:app.kubernetes.io/component=controller,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=cert-manager,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=cert-manager,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Reusing existing global key" key="k8s:app=cert-manager;k8s:app.kubernetes.io/component=controller;k8s:app.kubernetes.io/instance=cert-manager;k8s:app.kubernetes.io/name=cert-manager;k8s:app.kubernetes.io/version=v1.15.1;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=cert-manager;k8s:io.kubernetes.pod.namespace=cert-manager;" subsys=allocator
time="2025-04-17T22:04:41Z" level=info msg="Identity of endpoint changed" containerID=cbc1505e67 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3225 identity=509 identityLabels="k8s:app.kubernetes.io/component=controller,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=cert-manager,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=cert-manager,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Waiting for endpoint to be generated" containerID=cbc1505e67 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3225 identity=509 ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=f3e05d1c5d datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=1599 identity=30020 ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=f3e05d1c5d datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=1599 identity=30020 ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=cbc1505e67 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=3225 identity=509 ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=cbc1505e67 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=3225 identity=509 ipv4=10.244.1.12 ipv6= k8sPodName=cert-manager/cert-manager-5798486f6b-qppb7 subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.6 2e88355d-526b-46d0-ae5c-463fdcdb24ba default }" containerID=4b4a5b96722a7cf5999e2ee9368e8d93c0649a59c45a50faf1733bd6aa69c19a datapathConfiguration="&{false false false false false <nil>}" interface=lxc424bd7f452fa k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:41Z" level=info msg="New endpoint" containerID=4b4a5b9672 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3208 ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Resolving identity labels (blocking)" containerID=4b4a5b9672 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3208 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Reusing existing global key" key="k8s:app=nginx;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=demo;" subsys=allocator
time="2025-04-17T22:04:41Z" level=info msg="Identity of endpoint changed" containerID=4b4a5b9672 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3208 identity=60447 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Waiting for endpoint to be generated" containerID=4b4a5b9672 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3208 identity=60447 ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=4b4a5b9672 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=3208 identity=60447 ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=4b4a5b9672 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=3208 identity=60447 ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.125 de79be76-1a86-4e99-bda7-0df4ac52e8cc default }" containerID=d4eda2588c9aae6fb4d96f15064135d9b91e353e28c16d4de411eb58e27e64c3 datapathConfiguration="&{false false false false false <nil>}" interface=lxcd94c723e0e8f k8sPodName=argocd/argocd-server-6f9745ff7-kj46x labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:41Z" level=info msg="New endpoint" containerID=d4eda2588c datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=849 ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Resolving identity labels (blocking)" containerID=d4eda2588c datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=849 identityLabels="k8s:app.kubernetes.io/name=argocd-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-server;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-server;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:41Z" level=info msg="Identity of endpoint changed" containerID=d4eda2588c datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=849 identity=1930 identityLabels="k8s:app.kubernetes.io/name=argocd-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Waiting for endpoint to be generated" containerID=d4eda2588c datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=849 identity=1930 ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Rewrote endpoint BPF program" containerID=d4eda2588c datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=849 identity=1930 ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Successful endpoint creation" containerID=d4eda2588c datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=849 identity=1930 ipv4=10.244.1.125 ipv6= k8sPodName=argocd/argocd-server-6f9745ff7-kj46x subsys=daemon
time="2025-04-17T22:04:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.112 7ca0b46e-3484-49a2-ad4c-df150f7624ba default }" containerID=bf96af1bf079817ca7f1de6393ec7cede5086ad8ebebb76487d6aeac5f53599a datapathConfiguration="&{false false false false false <nil>}" interface=lxce8d72ff3b26a k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:41Z" level=info msg="New endpoint" containerID=bf96af1bf0 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=561 ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Resolving identity labels (blocking)" containerID=bf96af1bf0 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=561 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/instance=bikexbike;k8s:app.kubernetes.io/name=bikexbike;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=bikexbike;" subsys=allocator
time="2025-04-17T22:04:41Z" level=info msg="Identity of endpoint changed" containerID=bf96af1bf0 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=561 identity=29331 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:41Z" level=info msg="Waiting for endpoint to be generated" containerID=bf96af1bf0 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=561 identity=29331 ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Rewrote endpoint BPF program" containerID=bf96af1bf0 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=561 identity=29331 ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Successful endpoint creation" containerID=bf96af1bf0 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=561 identity=29331 ipv4=10.244.1.112 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-t6fjv subsys=daemon
time="2025-04-17T22:04:42Z" level=info msg="Create endpoint request" addressing="&{10.244.1.59 6f9a5746-68c6-4921-84e4-50622b780e18 default }" containerID=20659f8a6d46fde4e0f82429e970ee21a032f6289ddfe65c318c2240110adc11 datapathConfiguration="&{false false false false false <nil>}" interface=lxc548fd48d3b44 k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:42Z" level=info msg="New endpoint" containerID=20659f8a6d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3850 ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Resolving identity labels (blocking)" containerID=20659f8a6d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3850 identityLabels="k8s:app.kubernetes.io/name=hubble-ui,k8s:app.kubernetes.io/part-of=cilium,k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=hubble-ui,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=hubble-ui" ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=hubble-ui;k8s:app.kubernetes.io/part-of=cilium;k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=hubble-ui;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=hubble-ui;" subsys=allocator
time="2025-04-17T22:04:42Z" level=info msg="Identity of endpoint changed" containerID=20659f8a6d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3850 identity=1382 identityLabels="k8s:app.kubernetes.io/name=hubble-ui,k8s:app.kubernetes.io/part-of=cilium,k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=hubble-ui,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=hubble-ui" ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Waiting for endpoint to be generated" containerID=20659f8a6d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3850 identity=1382 ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Rewrote endpoint BPF program" containerID=20659f8a6d datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=3850 identity=1382 ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Successful endpoint creation" containerID=20659f8a6d datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=3850 identity=1382 ipv4=10.244.1.59 ipv6= k8sPodName=kube-system/hubble-ui-5f4497c6b9-z49c8 subsys=daemon
time="2025-04-17T22:04:42Z" level=info msg="Create endpoint request" addressing="&{10.244.1.45 9c1b71fc-815d-47ec-9f92-96cbfea24d27 default }" containerID=47fe591a371cb0c903a8cab3b3db2f4f3e06eae370f84dbcb9ad98a9a50a71aa datapathConfiguration="&{false false false false false <nil>}" interface=lxc204d68d7fe54 k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:42Z" level=info msg="New endpoint" containerID=47fe591a37 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=276 ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Resolving identity labels (blocking)" containerID=47fe591a37 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=276 identityLabels="k8s:app.kubernetes.io/component=cainjector,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=cainjector,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=cainjector,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-cainjector,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Reusing existing global key" key="k8s:app=cainjector;k8s:app.kubernetes.io/component=cainjector;k8s:app.kubernetes.io/instance=cert-manager;k8s:app.kubernetes.io/name=cainjector;k8s:app.kubernetes.io/version=v1.15.1;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-cainjector;k8s:io.kubernetes.pod.namespace=cert-manager;" subsys=allocator
time="2025-04-17T22:04:42Z" level=info msg="Identity of endpoint changed" containerID=47fe591a37 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=276 identity=2984 identityLabels="k8s:app.kubernetes.io/component=cainjector,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=cainjector,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=cainjector,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-cainjector,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Waiting for endpoint to be generated" containerID=47fe591a37 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=276 identity=2984 ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Rewrote endpoint BPF program" containerID=47fe591a37 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=276 identity=2984 ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk subsys=endpoint
time="2025-04-17T22:04:42Z" level=info msg="Successful endpoint creation" containerID=47fe591a37 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=276 identity=2984 ipv4=10.244.1.45 ipv6= k8sPodName=cert-manager/cert-manager-cainjector-7666685ff5-7vpqk subsys=daemon
time="2025-04-17T22:04:43Z" level=info msg="Create endpoint request" addressing="&{10.244.1.120 bded704d-05c4-4050-a5ee-0eed5c6120c4 default }" containerID=e46fee48dd0c6a1ea84f75f089b5fe611b89fa29aca227edd6413a23c006512e datapathConfiguration="&{false false false false false <nil>}" interface=lxcee0fb6be79a7 k8sPodName=argocd/argocd-application-controller-0 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:43Z" level=info msg="New endpoint" containerID=e46fee48dd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=497 ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Resolving identity labels (blocking)" containerID=e46fee48dd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=497 identityLabels="k8s:app.kubernetes.io/name=argocd-application-controller,k8s:apps.kubernetes.io/pod-index=0,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-application-controller,k8s:io.kubernetes.pod.namespace=argocd,k8s:statefulset.kubernetes.io/pod-name=argocd-application-controller-0" ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-application-controller;k8s:apps.kubernetes.io/pod-index=0;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-application-controller;k8s:io.kubernetes.pod.namespace=argocd;k8s:statefulset.kubernetes.io/pod-name=argocd-application-controller-0;" subsys=allocator
time="2025-04-17T22:04:43Z" level=info msg="Identity of endpoint changed" containerID=e46fee48dd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=497 identity=15523 identityLabels="k8s:app.kubernetes.io/name=argocd-application-controller,k8s:apps.kubernetes.io/pod-index=0,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-application-controller,k8s:io.kubernetes.pod.namespace=argocd,k8s:statefulset.kubernetes.io/pod-name=argocd-application-controller-0" ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Waiting for endpoint to be generated" containerID=e46fee48dd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=497 identity=15523 ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Rewrote endpoint BPF program" containerID=e46fee48dd datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=497 identity=15523 ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Successful endpoint creation" containerID=e46fee48dd datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=497 identity=15523 ipv4=10.244.1.120 ipv6= k8sPodName=argocd/argocd-application-controller-0 subsys=daemon
time="2025-04-17T22:04:43Z" level=info msg="Create endpoint request" addressing="&{10.244.1.26 be15d53a-60ce-46bf-92c2-6bc683f760bd default }" containerID=575c2bfcdf037c4c311013c985613f9bb2ce3407df56e4707ad44a86637b6364 datapathConfiguration="&{false false false false false <nil>}" interface=lxc7925e256ff1d k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:43Z" level=info msg="New endpoint" containerID=575c2bfcdf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=70 ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Resolving identity labels (blocking)" containerID=575c2bfcdf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=70 identityLabels="k8s:app.kubernetes.io/name=argocd-repo-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-repo-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=argocd-repo-server;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=argocd-repo-server;k8s:io.kubernetes.pod.namespace=argocd;" subsys=allocator
time="2025-04-17T22:04:43Z" level=info msg="Identity of endpoint changed" containerID=575c2bfcdf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=70 identity=10654 identityLabels="k8s:app.kubernetes.io/name=argocd-repo-server,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=argocd,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=argocd-repo-server,k8s:io.kubernetes.pod.namespace=argocd" ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Waiting for endpoint to be generated" containerID=575c2bfcdf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=70 identity=10654 ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Rewrote endpoint BPF program" containerID=575c2bfcdf datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=70 identity=10654 ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Successful endpoint creation" containerID=575c2bfcdf datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=70 identity=10654 ipv4=10.244.1.26 ipv6= k8sPodName=argocd/argocd-repo-server-5b6f44d978-4gnpc subsys=daemon
time="2025-04-17T22:04:43Z" level=info msg="Create endpoint request" addressing="&{10.244.1.43 612ba327-6579-40cf-97fc-62783dee8bf9 default }" containerID=af7c770ec426f73a5ffacb2513ead6ed472d9279323bf4cde554c7e6beb8af8c datapathConfiguration="&{false false false false false <nil>}" interface=lxc7a60b9024355 k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:43Z" level=info msg="New endpoint" containerID=af7c770ec4 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=247 ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Resolving identity labels (blocking)" containerID=af7c770ec4 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=247 identityLabels="k8s:app.kubernetes.io/name=hubble-relay,k8s:app.kubernetes.io/part-of=cilium,k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=hubble-relay,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=hubble-relay" ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/name=hubble-relay;k8s:app.kubernetes.io/part-of=cilium;k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=hubble-relay;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=hubble-relay;" subsys=allocator
time="2025-04-17T22:04:43Z" level=info msg="Identity of endpoint changed" containerID=af7c770ec4 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=247 identity=14492 identityLabels="k8s:app.kubernetes.io/name=hubble-relay,k8s:app.kubernetes.io/part-of=cilium,k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=hubble-relay,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=hubble-relay" ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Waiting for endpoint to be generated" containerID=af7c770ec4 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=247 identity=14492 ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Rewrote endpoint BPF program" containerID=af7c770ec4 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=247 identity=14492 ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl subsys=endpoint
time="2025-04-17T22:04:43Z" level=info msg="Successful endpoint creation" containerID=af7c770ec4 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=247 identity=14492 ipv4=10.244.1.43 ipv6= k8sPodName=kube-system/hubble-relay-fbcb88677-9p8rl subsys=daemon
time="2025-04-17T22:04:44Z" level=info msg="Create endpoint request" addressing="&{10.244.1.73 9aecd771-469c-4014-8093-84a9b9cc6e0b default }" containerID=fba121b519e4fb399e8822d635ebb01052ca3160a39c20ecb9b8c5e1fbc71e4e datapathConfiguration="&{false false false false false <nil>}" interface=lxcc0bf1d2fa535 k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:44Z" level=info msg="New endpoint" containerID=fba121b519 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1948 ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Resolving identity labels (blocking)" containerID=fba121b519 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1948 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Identity of endpoint changed" containerID=fba121b519 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1948 identity=29331 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Waiting for endpoint to be generated" containerID=fba121b519 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1948 identity=29331 ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Rewrote endpoint BPF program" containerID=fba121b519 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=1948 identity=29331 ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Successful endpoint creation" containerID=fba121b519 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=1948 identity=29331 ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=daemon
time="2025-04-17T22:04:44Z" level=info msg="Create endpoint request" addressing="&{10.244.1.27 33e3f78f-9e7e-4e54-895f-1817802961b3 default }" containerID=468cd28bdd721e4712b2cd99a4d2a6f2620eb2814ef959f54807defc45703877 datapathConfiguration="&{false false false false false <nil>}" interface=lxcc810e996b982 k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:44Z" level=info msg="New endpoint" containerID=468cd28bdd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2983 ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Resolving identity labels (blocking)" containerID=468cd28bdd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2983 identityLabels="k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=metrics-server,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=metrics-server" ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Reusing existing global key" key="k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=metrics-server;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=metrics-server;" subsys=allocator
time="2025-04-17T22:04:44Z" level=info msg="Identity of endpoint changed" containerID=468cd28bdd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2983 identity=49023 identityLabels="k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=metrics-server,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=metrics-server" ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Waiting for endpoint to be generated" containerID=468cd28bdd datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2983 identity=49023 ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Rewrote endpoint BPF program" containerID=468cd28bdd datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=2983 identity=49023 ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Successful endpoint creation" containerID=468cd28bdd datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=2983 identity=49023 ipv4=10.244.1.27 ipv6= k8sPodName=kube-system/metrics-server-6d94bc8694-bktgr subsys=daemon
time="2025-04-17T22:04:44Z" level=info msg="Create endpoint request" addressing="&{10.244.1.46 fc10a50f-7f70-40d7-b3f1-1015ccd19000 default }" containerID=5a2994da5aec4e2b9a49fac6c3f25e2fe46407d2f89711dce6341f791db2ac42 datapathConfiguration="&{false false false false false <nil>}" interface=lxc89bdec34ed14 k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:44Z" level=info msg="New endpoint" containerID=5a2994da5a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=104 ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Resolving identity labels (blocking)" containerID=5a2994da5a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=104 identityLabels="k8s:app.kubernetes.io/component=webhook,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=webhook,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=webhook,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-webhook,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Reusing existing global key" key="k8s:app=webhook;k8s:app.kubernetes.io/component=webhook;k8s:app.kubernetes.io/instance=cert-manager;k8s:app.kubernetes.io/name=webhook;k8s:app.kubernetes.io/version=v1.15.1;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-webhook;k8s:io.kubernetes.pod.namespace=cert-manager;" subsys=allocator
time="2025-04-17T22:04:44Z" level=info msg="Identity of endpoint changed" containerID=5a2994da5a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=104 identity=64008 identityLabels="k8s:app.kubernetes.io/component=webhook,k8s:app.kubernetes.io/instance=cert-manager,k8s:app.kubernetes.io/name=webhook,k8s:app.kubernetes.io/version=v1.15.1,k8s:app=webhook,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=cert-manager,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=cert-manager-webhook,k8s:io.kubernetes.pod.namespace=cert-manager" ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:44Z" level=info msg="Waiting for endpoint to be generated" containerID=5a2994da5a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=104 identity=64008 ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Rewrote endpoint BPF program" containerID=5a2994da5a datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=104 identity=64008 ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Successful endpoint creation" containerID=5a2994da5a datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=104 identity=64008 ipv4=10.244.1.46 ipv6= k8sPodName=cert-manager/cert-manager-webhook-5f594df789-xgcjl subsys=daemon
time="2025-04-17T22:04:45Z" level=info msg="Create endpoint request" addressing="&{10.244.1.56 48e794a8-1e5d-4442-b945-55ba359483a6 default }" containerID=91ef5fafe63d82f38dd626c75d7124fa985e4255cd589661c53c8e6852f03648 datapathConfiguration="&{false false false false false <nil>}" interface=lxc1100eb0f0490 k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:04:45Z" level=info msg="New endpoint" containerID=91ef5fafe6 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2948 ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Resolving identity labels (blocking)" containerID=91ef5fafe6 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2948 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Identity of endpoint changed" containerID=91ef5fafe6 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2948 identity=23569 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Waiting for endpoint to be generated" containerID=91ef5fafe6 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2948 identity=23569 ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Rewrote endpoint BPF program" containerID=91ef5fafe6 datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=2948 identity=23569 ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=endpoint
time="2025-04-17T22:04:45Z" level=info msg="Successful endpoint creation" containerID=91ef5fafe6 datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=2948 identity=23569 ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=daemon
time="2025-04-17T22:05:15Z" level=info msg="Node updated" clusterName=default nodeName=system-0-655pn subsys=nodemanager
time="2025-04-17T22:05:15Z" level=info msg="Node updated" clusterName=default nodeName=system-0-655pn subsys=nodemanager
time="2025-04-17T22:05:16Z" level=info msg="Node updated" clusterName=default nodeName=system-0-655pn subsys=nodemanager
time="2025-04-17T22:06:16Z" level=info msg="Node deleted" clusterName=default nodeName=system-0-6tk3b subsys=nodemanager
time="2025-04-17T22:06:56Z" level=info msg="Node deleted" clusterName=default nodeName=system-0-6tk3r subsys=nodemanager
time="2025-04-17T22:09:26Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.00528717041015625 newInterval=7m30s subsys=map-ct
time="2025-04-17T22:16:56Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.01020050048828125 newInterval=11m15s subsys=map-ct
time="2025-04-17T22:28:11Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.01514434814453125 newInterval=16m53s subsys=map-ct
time="2025-04-17T22:45:04Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.02292633056640625 newInterval=25m20s subsys=map-ct
time="2025-04-17T23:10:24Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.0340576171875 newInterval=38m0s subsys=map-ct
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=6e6a1ffb-258f-4b35-b15a-596f02831268 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6e6a1ffb-258f-4b35-b15a-596f02831268 policyRevision=10 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=e04f7347-843d-445c-8900-7f47a79ce241 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=e04f7347-843d-445c-8900-7f47a79ce241 policyRevision=12 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=52caf630-c483-4769-846b-21ad91515baa subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=52caf630-c483-4769-846b-21ad91515baa policyRevision=14 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=3ffb3674-0687-493d-8026-84e6bcb16bb9 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=3ffb3674-0687-493d-8026-84e6bcb16bb9 policyRevision=16 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=2eb75a51-e804-4d38-a1e4-ee6c0a6cb604 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2eb75a51-e804-4d38-a1e4-ee6c0a6cb604 policyRevision=18 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=304e2e2d-20e2-41c5-8a93-2ba8656df5f6 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=304e2e2d-20e2-41c5-8a93-2ba8656df5f6 policyRevision=20 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=95c15523-625d-4201-9836-ed68eb9951ac subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=95c15523-625d-4201-9836-ed68eb9951ac policyRevision=22 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=0fc9d590-7cd1-4e65-9902-367afa656198 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=0fc9d590-7cd1-4e65-9902-367afa656198 policyRevision=24 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=5464360e-ad6a-44c9-be09-9b2991b4d9c2 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=5464360e-ad6a-44c9-be09-9b2991b4d9c2 policyRevision=26 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=667e5eb3-be94-47d6-b903-884428c4ebe8 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=667e5eb3-be94-47d6-b903-884428c4ebe8 policyRevision=28 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=d8c0ae6b-c490-41d2-8d63-7b19da985eb0 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d8c0ae6b-c490-41d2-8d63-7b19da985eb0 policyRevision=30 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=302e1ed5-4fdc-49b3-a59f-ca51d19b0f54 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=302e1ed5-4fdc-49b3-a59f-ca51d19b0f54 policyRevision=32 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=4f9abf9f-75d2-4e20-bb33-e65a7060608c subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=4f9abf9f-75d2-4e20-bb33-e65a7060608c policyRevision=34 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=9686038c-8750-489a-9c43-bbefcf218bb9 subsys=daemon
time="2025-04-22T07:41:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=9686038c-8750-489a-9c43-bbefcf218bb9 policyRevision=36 subsys=daemon
time="2025-04-26T18:05:51Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-26T18:05:51Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-26T18:06:52Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-26T18:06:52Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-26T18:27:31Z" level=warning msg="service not found" k8sNamespace=ingress-nginx k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-26T18:27:31Z" level=warning msg="service not found" k8sNamespace=ingress-nginx k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-27T20:18:38Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-27T20:18:59Z" level=warning msg="service not found" k8sNamespace=kubeintel k8sSvcName=cm-acme-http-solver-cmhbq obj="10.245.224.188:8089/ANY" subsys=k8s-watcher
time="2025-04-27T20:51:16Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=241c0125-ecf7-45bf-9bae-fb1050afe25c subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=241c0125-ecf7-45bf-9bae-fb1050afe25c policyRevision=38 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=1e0ee758-9abc-498c-a11d-7c6c6f27b04c subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1e0ee758-9abc-498c-a11d-7c6c6f27b04c policyRevision=40 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=78f9e5b7-c0e9-494f-b201-a50cf77e8ba3 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=78f9e5b7-c0e9-494f-b201-a50cf77e8ba3 policyRevision=42 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=d0521085-9936-4b1b-b240-9857b87d6842 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d0521085-9936-4b1b-b240-9857b87d6842 policyRevision=44 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=58cd1899-b3ac-449c-a055-9752928c0114 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=58cd1899-b3ac-449c-a055-9752928c0114 policyRevision=46 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=968a96f0-7193-45c8-9cc9-1b6c47cd1a9e subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=968a96f0-7193-45c8-9cc9-1b6c47cd1a9e policyRevision=48 subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=f0d2eca0-d987-4069-acce-fc1b7b29b39a subsys=daemon
time="2025-04-29T12:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=f0d2eca0-d987-4069-acce-fc1b7b29b39a policyRevision=50 subsys=daemon
time="2025-05-05T00:12:31Z" level=info msg="Delete endpoint request" containerID=91ef5fafe6 endpointID=2948 k8sNamespace=kubeintel k8sPodName=kubeintel-55955b8c46-bbjbh subsys=daemon
time="2025-05-05T00:12:31Z" level=info msg="Releasing key" key="[k8s:app.kubernetes.io/instance=kubeintel k8s:app.kubernetes.io/name=kubeintel k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=kubeintel k8s:io.kubernetes.pod.namespace=kubeintel]" subsys=allocator
time="2025-05-05T00:12:31Z" level=info msg="Removed endpoint" containerID=91ef5fafe6 datapathPolicyRevision=50 desiredPolicyRevision=36 endpointID=2948 identity=23569 ipv4=10.244.1.56 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-bbjbh subsys=endpoint
time="2025-05-05T00:19:08Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.04230499267578125 newInterval=57m0s subsys=map-ct
time="2025-05-06T03:54:10Z" level=info msg="Delete endpoint request" containerID=fba121b519 endpointID=1948 k8sNamespace=bikexbike k8sPodName=bikexbike-588cddd898-9dz72 subsys=daemon
time="2025-05-06T03:54:10Z" level=info msg="Releasing key" key="[k8s:app.kubernetes.io/instance=bikexbike k8s:app.kubernetes.io/name=bikexbike k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=bikexbike]" subsys=allocator
time="2025-05-06T03:54:10Z" level=info msg="Removed endpoint" containerID=fba121b519 datapathPolicyRevision=50 desiredPolicyRevision=36 endpointID=1948 identity=29331 ipv4=10.244.1.73 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-9dz72 subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Delete endpoint request" containerID=4b4a5b9672 endpointID=3208 k8sNamespace=demo k8sPodName=nginx-deployment-86dcfdf4c6-zg6vm subsys=daemon
time="2025-05-08T21:44:42Z" level=info msg="Releasing key" key="[k8s:app=nginx k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=demo]" subsys=allocator
time="2025-05-08T21:44:42Z" level=info msg="Removed endpoint" containerID=4b4a5b9672 datapathPolicyRevision=50 desiredPolicyRevision=36 endpointID=3208 identity=60447 ipv4=10.244.1.6 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-zg6vm subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Create endpoint request" addressing="&{10.244.1.15 18b4e1eb-4ff7-42ca-8932-a5db78dd3841 default }" containerID=600ed6ccf165cdd8257b93e509ee54c2045c4e16c82f2540a56530fe199e8e1a datapathConfiguration="&{false false false false false <nil>}" interface=lxc59ef080483d6 k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q labels="[]" subsys=daemon sync-build=true
time="2025-05-08T21:44:42Z" level=info msg="New endpoint" containerID=600ed6ccf1 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3879 ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Resolving identity labels (blocking)" containerID=600ed6ccf1 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3879 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Reusing existing global key" key="k8s:app=nginx;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=demo;" subsys=allocator
time="2025-05-08T21:44:42Z" level=info msg="Identity of endpoint changed" containerID=600ed6ccf1 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3879 identity=60447 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q oldIdentity="no identity" subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Waiting for endpoint to be generated" containerID=600ed6ccf1 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3879 identity=60447 ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Rewrote endpoint BPF program" containerID=600ed6ccf1 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=3879 identity=60447 ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=endpoint
time="2025-05-08T21:44:42Z" level=info msg="Successful endpoint creation" containerID=600ed6ccf1 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=3879 identity=60447 ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=daemon
time="2025-05-08T22:04:57Z" level=info msg="Delete endpoint request" containerID=600ed6ccf1 endpointID=3879 k8sNamespace=demo k8sPodName=nginx-deployment-86dcfdf4c6-pxz7q subsys=daemon
time="2025-05-08T22:04:57Z" level=info msg="Releasing key" key="[k8s:app=nginx k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=demo]" subsys=allocator
time="2025-05-08T22:04:57Z" level=info msg="Removed endpoint" containerID=600ed6ccf1 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=3879 identity=60447 ipv4=10.244.1.15 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-pxz7q subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-08T22:41:19Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=bb90ae07-0508-4036-baa8-5ffceb236efd subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=bb90ae07-0508-4036-baa8-5ffceb236efd policyRevision=52 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=1d294035-1980-4a5c-a452-0f9fb441c513 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1d294035-1980-4a5c-a452-0f9fb441c513 policyRevision=54 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=0145b270-6958-4089-a2be-589e5d1ce521 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=0145b270-6958-4089-a2be-589e5d1ce521 policyRevision=56 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=dfa764e4-17ec-4d42-89be-b82cf0718a6f subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=dfa764e4-17ec-4d42-89be-b82cf0718a6f policyRevision=58 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=8c26b573-d94a-4e43-ba20-29da1a02f5c4 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=8c26b573-d94a-4e43-ba20-29da1a02f5c4 policyRevision=60 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=dfa5e6b7-656f-430c-9650-c4bd08bf50d8 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=dfa5e6b7-656f-430c-9650-c4bd08bf50d8 policyRevision=62 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=7aa4dcf6-54cb-4818-8ace-20360a51b415 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=7aa4dcf6-54cb-4818-8ace-20360a51b415 policyRevision=64 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=e02dceca-c519-4af3-9e57-c1b6b85c23ca subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=e02dceca-c519-4af3-9e57-c1b6b85c23ca policyRevision=66 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=5763e644-7ed2-4216-a7a4-25020d63fb56 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=5763e644-7ed2-4216-a7a4-25020d63fb56 policyRevision=68 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=87deed5d-6352-46ee-860e-b5cf2630bbe2 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=87deed5d-6352-46ee-860e-b5cf2630bbe2 policyRevision=70 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=59969fbf-e65d-4a24-9de7-e88b92bf4f0f subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=59969fbf-e65d-4a24-9de7-e88b92bf4f0f policyRevision=72 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=19f848ef-640c-4902-97f9-feaf57e7fc12 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=19f848ef-640c-4902-97f9-feaf57e7fc12 policyRevision=74 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=d25069e3-e555-41e3-a0e2-2ca9cba5895a subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d25069e3-e555-41e3-a0e2-2ca9cba5895a policyRevision=76 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=388454d2-58f2-4e52-9a3c-af8c54f5efad subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=388454d2-58f2-4e52-9a3c-af8c54f5efad policyRevision=78 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-20T22:52:49Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T22:52:50Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T22:53:10Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=cm-acme-http-solver-h5gc9 obj="10.245.119.204:8089/ANY" subsys=k8s-watcher
time="2025-05-20T22:53:10Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=cm-acme-http-solver-d8z46 obj="10.245.21.144:8089/ANY" subsys=k8s-watcher
time="2025-05-20T23:26:21Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T23:26:22Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=d39d62b8-f716-4cc8-ac1f-a23217085392 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d39d62b8-f716-4cc8-ac1f-a23217085392 policyRevision=80 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=861a1e45-dd14-4122-bf1f-a6cfa97664e4 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=861a1e45-dd14-4122-bf1f-a6cfa97664e4 policyRevision=82 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=b1678a77-8b1e-4f31-8173-2ce2c9f953bb subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=b1678a77-8b1e-4f31-8173-2ce2c9f953bb policyRevision=84 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=dde50b09-1a02-4069-887e-ad25f66ef42f subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=dde50b09-1a02-4069-887e-ad25f66ef42f policyRevision=86 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=2e1293f3-727b-43d1-8a57-1f590a798b9f subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2e1293f3-727b-43d1-8a57-1f590a798b9f policyRevision=88 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=4c9c715a-2c91-4e21-a839-861ec97de0e9 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=4c9c715a-2c91-4e21-a839-861ec97de0e9 policyRevision=90 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=3496cede-bbe2-4cf2-8c14-e8d16efc2862 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=3496cede-bbe2-4cf2-8c14-e8d16efc2862 policyRevision=92 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-22T22:00:42Z" level=info msg="Delete endpoint request" containerID=f3e05d1c5d endpointID=1599 k8sNamespace=kube-system k8sPodName=coredns-854895db77-p9hbd subsys=daemon
time="2025-05-22T22:00:42Z" level=info msg="Releasing key" key="[k8s:doks.digitalocean.com/managed=true k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=coredns k8s:io.kubernetes.pod.namespace=kube-system k8s:k8s-app=kube-dns]" subsys=allocator
time="2025-05-22T22:00:42Z" level=info msg="Removed endpoint" containerID=f3e05d1c5d datapathPolicyRevision=92 desiredPolicyRevision=78 endpointID=1599 identity=30020 ipv4=10.244.1.123 ipv6= k8sPodName=kube-system/coredns-854895db77-p9hbd subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Create endpoint request" addressing="&{10.244.1.61 b2d8cc9e-4ea8-4894-bcbe-d3ab02c29ef6 default }" containerID=dc5e48ca7d2fb015d3448f72279677556ecfb046b18362a0bf13895b9fbbfacf datapathConfiguration="&{false false false false false <nil>}" interface=lxca532b63104ac k8sPodName=kube-system/coredns-6b79676d8-9569v labels="[]" subsys=daemon sync-build=true
time="2025-05-22T22:00:42Z" level=info msg="New endpoint" containerID=dc5e48ca7d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=356 ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Resolving identity labels (blocking)" containerID=dc5e48ca7d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=356 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Reusing existing global key" key="k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=coredns;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=kube-dns;" subsys=allocator
time="2025-05-22T22:00:42Z" level=info msg="Identity of endpoint changed" containerID=dc5e48ca7d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=356 identity=30020 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v oldIdentity="no identity" subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Waiting for endpoint to be generated" containerID=dc5e48ca7d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=356 identity=30020 ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Rewrote endpoint BPF program" containerID=dc5e48ca7d datapathPolicyRevision=0 desiredPolicyRevision=92 endpointID=356 identity=30020 ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Successful endpoint creation" containerID=dc5e48ca7d datapathPolicyRevision=92 desiredPolicyRevision=92 endpointID=356 identity=30020 ipv4=10.244.1.61 ipv6= k8sPodName=kube-system/coredns-6b79676d8-9569v subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=6320599f-8831-436f-a0af-aebca63ea3b0 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6320599f-8831-436f-a0af-aebca63ea3b0 policyRevision=94 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=c1227c40-d350-45b5-8c77-fb356c789259 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c1227c40-d350-45b5-8c77-fb356c789259 policyRevision=96 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=dca1dc14-f9b6-4bc1-bc4c-34129c93a40f subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=dca1dc14-f9b6-4bc1-bc4c-34129c93a40f policyRevision=98 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=0daf0a66-c6ed-4b34-88f2-7c164071686f subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=0daf0a66-c6ed-4b34-88f2-7c164071686f policyRevision=100 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=7e246bc7-a405-4c10-96d9-67ae25b8ffe9 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=7e246bc7-a405-4c10-96d9-67ae25b8ffe9 policyRevision=102 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=7d18237b-2412-49b3-885b-3fb9b36ba7ac subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=7d18237b-2412-49b3-885b-3fb9b36ba7ac policyRevision=104 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=36d121f0-259e-4fc0-9f97-639eeb619115 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=36d121f0-259e-4fc0-9f97-639eeb619115 policyRevision=106 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=d02a2590-a58a-4023-891d-f2035aaee1e6 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d02a2590-a58a-4023-891d-f2035aaee1e6 policyRevision=108 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=9de6c8b4-16e9-447e-a43e-5a8afe50bef9 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=9de6c8b4-16e9-447e-a43e-5a8afe50bef9 policyRevision=110 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=2a4d21ea-6c34-4f80-b83a-83e1a11f8ea9 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2a4d21ea-6c34-4f80-b83a-83e1a11f8ea9 policyRevision=112 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=c24f1493-1c87-4ec7-bd4f-36d5f6046c4e subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c24f1493-1c87-4ec7-bd4f-36d5f6046c4e policyRevision=114 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=0c046dc9-cdeb-4072-bd15-711c728c5413 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=0c046dc9-cdeb-4072-bd15-711c728c5413 policyRevision=116 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=ce2a9abe-b3e0-4fc8-9534-e0c627cc553f subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=ce2a9abe-b3e0-4fc8-9534-e0c627cc553f policyRevision=118 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=e35922a8-7564-43c0-bc00-1977e288b163 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=e35922a8-7564-43c0-bc00-1977e288b163 policyRevision=120 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
Kubeintel ©2024